incr.comp.: Compute hashes of all query results. · rust-lang/rust@0cf6000 (original) (raw)
`@@ -13,9 +13,10 @@ use hir::def_id::DefId;
`
13
13
`use hir::map::DefPathHash;
`
14
14
`use ich::{self, CachingCodemapView};
`
15
15
`use session::config::DebugInfoLevel::NoDebugInfo;
`
16
``
`-
use ty;
`
17
``
`-
use util::nodemap::{NodeMap, ItemLocalMap};
`
``
16
`+
use ty::{self, fast_reject};
`
``
17
`+
use util::nodemap::{NodeMap, NodeSet, ItemLocalMap};
`
18
18
``
``
19
`+
use std::cmp::Ord;
`
19
20
`use std::hash as std_hash;
`
20
21
`use std::collections::{HashMap, HashSet, BTreeMap};
`
21
22
``
`@@ -47,6 +48,7 @@ pub struct StableHashingContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
`
47
48
`#[derive(PartialEq, Eq, Clone, Copy)]
`
48
49
`pub enum NodeIdHashingMode {
`
49
50
`Ignore,
`
``
51
`+
CheckedIgnore,
`
50
52
`HashDefPath,
`
51
53
`HashTraitsInScope,
`
52
54
`}
`
`@@ -148,7 +150,7 @@ impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
`
148
150
`self.overflow_checks_enabled = true;
`
149
151
`}
`
150
152
`let prev_hash_node_ids = self.node_id_hashing_mode;
`
151
``
`-
self.node_id_hashing_mode = NodeIdHashingMode::Ignore;
`
``
153
`+
self.node_id_hashing_mode = NodeIdHashingMode::CheckedIgnore;
`
152
154
``
153
155
`f(self);
`
154
156
``
`@@ -201,6 +203,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::N
`
201
203
`hasher: &mut StableHasher) {
`
202
204
`match hcx.node_id_hashing_mode {
`
203
205
`NodeIdHashingMode::Ignore => {
`
``
206
`+
// Don't do anything.
`
``
207
`+
}
`
``
208
`+
NodeIdHashingMode::CheckedIgnore => {
`
204
209
`// Most NodeIds in the HIR can be ignored, but if there is a
`
205
210
`// corresponding entry in the trait_map we need to hash that.
`
206
211
`// Make sure we don't ignore too much by checking that there is
`
`@@ -322,7 +327,7 @@ pub fn hash_stable_hashmap<'a, 'gcx, 'tcx, K, V, R, SK, F, W>(
`
322
327
`let mut keys: Vec<_> = map.keys()
`
323
328
`.map(|k| (extract_stable_key(hcx, k), k))
`
324
329
`.collect();
`
325
``
`-
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
`
``
330
`+
keys.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
`
326
331
` keys.len().hash_stable(hcx, hasher);
`
327
332
`for (stable_key, key) in keys {
`
328
333
` stable_key.hash_stable(hcx, hasher);
`
`@@ -355,8 +360,25 @@ pub fn hash_stable_nodemap<'a, 'tcx, 'gcx, V, W>(
`
355
360
`where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
`
356
361
`W: StableHasherResult,
`
357
362
`{
`
358
``
`-
hash_stable_hashmap(hcx, hasher, map, |hcx, node_id| {
`
359
``
`-
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
`
``
363
`+
let definitions = hcx.tcx.hir.definitions();
`
``
364
`+
hash_stable_hashmap(hcx, hasher, map, |_, node_id| {
`
``
365
`+
let hir_id = definitions.node_to_hir_id(*node_id);
`
``
366
`+
let owner_def_path_hash = definitions.def_path_hash(hir_id.owner);
`
``
367
`+
(owner_def_path_hash, hir_id.local_id)
`
``
368
`+
});
`
``
369
`+
}
`
``
370
+
``
371
`+
pub fn hash_stable_nodeset<'a, 'tcx, 'gcx, W>(
`
``
372
`+
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
`
``
373
`+
hasher: &mut StableHasher,
`
``
374
`+
map: &NodeSet)
`
``
375
`+
where W: StableHasherResult,
`
``
376
`+
{
`
``
377
`+
let definitions = hcx.tcx.hir.definitions();
`
``
378
`+
hash_stable_hashset(hcx, hasher, map, |_, node_id| {
`
``
379
`+
let hir_id = definitions.node_to_hir_id(*node_id);
`
``
380
`+
let owner_def_path_hash = definitions.def_path_hash(hir_id.owner);
`
``
381
`+
(owner_def_path_hash, hir_id.local_id)
`
360
382
`});
`
361
383
`}
`
362
384
``
`@@ -387,10 +409,56 @@ pub fn hash_stable_btreemap<'a, 'tcx, 'gcx, K, V, SK, F, W>(
`
387
409
`let mut keys: Vec<_> = map.keys()
`
388
410
`.map(|k| (extract_stable_key(hcx, k), k))
`
389
411
`.collect();
`
390
``
`-
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
`
``
412
`+
keys.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
`
391
413
` keys.len().hash_stable(hcx, hasher);
`
392
414
`for (stable_key, key) in keys {
`
393
415
` stable_key.hash_stable(hcx, hasher);
`
394
416
` map[key].hash_stable(hcx, hasher);
`
395
417
`}
`
396
418
`}
`
``
419
+
``
420
`+
pub fn hash_stable_trait_impls<'a, 'tcx, 'gcx, W, R>(
`
``
421
`+
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
`
``
422
`+
hasher: &mut StableHasher,
`
``
423
`+
blanket_impls: &Vec<DefId>,
`
``
424
`+
non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
`
``
425
`+
where W: StableHasherResult,
`
``
426
`+
R: std_hash::BuildHasher,
`
``
427
`+
{
`
``
428
`+
{
`
``
429
`+
let mut blanket_impls: AccumulateVec<[_; 8]> = blanket_impls
`
``
430
`+
.iter()
`
``
431
`+
.map(|&def_id| hcx.def_path_hash(def_id))
`
``
432
`+
.collect();
`
``
433
+
``
434
`+
if blanket_impls.len() > 1 {
`
``
435
`+
blanket_impls.sort_unstable();
`
``
436
`+
}
`
``
437
+
``
438
`+
blanket_impls.hash_stable(hcx, hasher);
`
``
439
`+
}
`
``
440
+
``
441
`+
{
`
``
442
`+
let tcx = hcx.tcx();
`
``
443
`+
let mut keys: AccumulateVec<[_; 8]> =
`
``
444
`+
non_blanket_impls.keys()
`
``
445
`+
.map(|k| (k, k.map_def(|d| tcx.def_path_hash(d))))
`
``
446
`+
.collect();
`
``
447
`+
keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
`
``
448
`+
keys.len().hash_stable(hcx, hasher);
`
``
449
`+
for (key, ref stable_key) in keys {
`
``
450
`+
stable_key.hash_stable(hcx, hasher);
`
``
451
`+
let mut impls : AccumulateVec<[_; 8]> = non_blanket_impls[key]
`
``
452
`+
.iter()
`
``
453
`+
.map(|&impl_id| hcx.def_path_hash(impl_id))
`
``
454
`+
.collect();
`
``
455
+
``
456
`+
if impls.len() > 1 {
`
``
457
`+
impls.sort_unstable();
`
``
458
`+
}
`
``
459
+
``
460
`+
impls.hash_stable(hcx, hasher);
`
``
461
`+
}
`
``
462
`+
}
`
``
463
`+
}
`
``
464
+