// rustc_query_system/dep_graph/serialized.rs

use std::iter;
use std::marker::PhantomData;
use std::sync::Arc;

use rustc_data_structures::fingerprint::{Fingerprint, PackedFingerprint};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::outline;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sync::Lock;
use rustc_data_structures::unhash::UnhashMap;
use rustc_index::{Idx, IndexVec};
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder, IntEncodedWithFixedSize, MemDecoder};
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::{debug, instrument};

use super::query::DepGraphQuery;
use super::{DepKind, DepNode, DepNodeIndex, Deps};
use crate::dep_graph::edges::EdgesVec;

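// Index of a node in the previous (on-disk) dep-graph. The `#[max]` attribute
// caps it at 0x7FFF_FFFF, so it always fits in 31 bits.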
rustc_index::newtype_index! {
    #[encodable]
    #[max = 0x7FFF_FFFF]
    pub struct SerializedDepNodeIndex {}
}

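// Each edge target is stored using between 1 and DEP_NODE_SIZE bytes.
// DEP_NODE_PAD bytes of padding at the end of `edge_list_data` let the reader
// always pull a full DEP_NODE_SIZE-byte chunk, and DEP_NODE_WIDTH_BITS is the
// number of header bits used to record the per-edge byte width.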
const DEP_NODE_SIZE: usize = std::mem::size_of::<SerializedDepNodeIndex>();
const DEP_NODE_PAD: usize = DEP_NODE_SIZE - 1;
const DEP_NODE_WIDTH_BITS: usize = DEP_NODE_SIZE / 2;

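/// The previous dep-graph, read back from disk.
///
/// Nodes, fingerprints, and per-node edge headers live in parallel `IndexVec`s
/// keyed by `SerializedDepNodeIndex`; the edge targets themselves are packed
/// into the flat, variable-width `edge_list_data` byte buffer. `index` maps a
/// node's hash back to its index, with one map per `DepKind`.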
#[derive(Debug, Default)]
pub struct SerializedDepGraph {
    nodes: IndexVec<SerializedDepNodeIndex, DepNode>,
    fingerprints: IndexVec<SerializedDepNodeIndex, Fingerprint>,
    edge_list_indices: IndexVec<SerializedDepNodeIndex, EdgeHeader>,
    edge_list_data: Vec<u8>,
    index: Vec<UnhashMap<PackedFingerprint, SerializedDepNodeIndex>>,
}

impl SerializedDepGraph {
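    /// Yields the dependencies (edge targets) of `source` by decoding the
    /// variable-width indices recorded for it in `edge_list_data`.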
    #[inline]
    pub fn edge_targets_from(
        &self,
        source: SerializedDepNodeIndex,
    ) -> impl Iterator<Item = SerializedDepNodeIndex> + Clone + '_ {
        let header = self.edge_list_indices[source];
        let mut raw = &self.edge_list_data[header.start()..];
        let end = self
            .edge_list_indices
            .get(source + 1)
            .map(|h| h.start())
            .unwrap_or_else(|| self.edge_list_data.len() - DEP_NODE_PAD);

        let bytes_per_index = header.bytes_per_index();
        let len = (end - header.start()) / bytes_per_index;

        let mask = header.mask();
        (0..len).map(move |_| {
            let index = &raw[..DEP_NODE_SIZE];
            raw = &raw[bytes_per_index..];
            let index = u32::from_le_bytes(index.try_into().unwrap()) & mask;
            SerializedDepNodeIndex::from_u32(index)
        })
    }

    #[inline]
    pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode {
        self.nodes[dep_node_index]
    }

    #[inline]
    pub fn node_to_index_opt(&self, dep_node: &DepNode) -> Option<SerializedDepNodeIndex> {
        self.index.get(dep_node.kind.as_usize())?.get(&dep_node.hash).cloned()
    }

    #[inline]
    pub fn fingerprint_by_index(&self, dep_node_index: SerializedDepNodeIndex) -> Fingerprint {
        self.fingerprints[dep_node_index]
    }

    #[inline]
    pub fn node_count(&self) -> usize {
        self.nodes.len()
    }
}

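/// A packed record of where a node's edge list starts in `edge_list_data` and
/// how many bytes each edge index occupies: the low `DEP_NODE_WIDTH_BITS` bits
/// store `bytes_per_index - 1`, the remaining bits store the start offset.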
#[derive(Debug, Clone, Copy)]
struct EdgeHeader {
    repr: usize,
}

impl EdgeHeader {
    #[inline]
    fn start(self) -> usize {
        self.repr >> DEP_NODE_WIDTH_BITS
    }

    #[inline]
    fn bytes_per_index(self) -> usize {
        (self.repr & mask(DEP_NODE_WIDTH_BITS)) + 1
    }

    #[inline]
    fn mask(self) -> u32 {
        mask(self.bytes_per_index() * 8) as u32
    }
}

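/// Returns a `usize` whose low `bits` bits are set (valid for `1 <= bits <= usize::BITS`).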
#[inline]
fn mask(bits: usize) -> usize {
    usize::MAX >> ((std::mem::size_of::<usize>() * 8) - bits)
}

impl SerializedDepGraph {
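    /// Decodes a `SerializedDepGraph` from the bytes written by `GraphEncoder`:
    /// the total node and edge counts are read from the end of the buffer
    /// first, then each node's header, optional out-of-line edge count, and
    /// raw edge bytes are read in order.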
    #[instrument(level = "debug", skip(d))]
    pub fn decode<D: Deps>(d: &mut MemDecoder<'_>) -> Arc<SerializedDepGraph> {
        debug!("position: {:?}", d.position());
        let (node_count, edge_count) =
            d.with_position(d.len() - 2 * IntEncodedWithFixedSize::ENCODED_SIZE, |d| {
                debug!("position: {:?}", d.position());
                let node_count = IntEncodedWithFixedSize::decode(d).0 as usize;
                let edge_count = IntEncodedWithFixedSize::decode(d).0 as usize;
                (node_count, edge_count)
            });
        debug!("position: {:?}", d.position());

        debug!(?node_count, ?edge_count);

        let graph_bytes = d.len() - (2 * IntEncodedWithFixedSize::ENCODED_SIZE) - d.position();

        let mut nodes = IndexVec::with_capacity(node_count);
        let mut fingerprints = IndexVec::with_capacity(node_count);
        let mut edge_list_indices = IndexVec::with_capacity(node_count);
        let mut edge_list_data = Vec::with_capacity(
            graph_bytes - node_count * std::mem::size_of::<SerializedNodeHeader<D>>(),
        );

        for _index in 0..node_count {
            let node_header =
                SerializedNodeHeader::<D> { bytes: d.read_array(), _marker: PhantomData };

            let _i: SerializedDepNodeIndex = nodes.push(node_header.node());
            debug_assert_eq!(_i.index(), _index);

            let _i: SerializedDepNodeIndex = fingerprints.push(node_header.fingerprint());
            debug_assert_eq!(_i.index(), _index);

            let num_edges = node_header.len().unwrap_or_else(|| d.read_usize());

            let edges_len_bytes = node_header.bytes_per_index() * num_edges;
            let edges_header = node_header.edges_header(&edge_list_data);

            edge_list_data.extend(d.read_raw_bytes(edges_len_bytes));

            let _i: SerializedDepNodeIndex = edge_list_indices.push(edges_header);
            debug_assert_eq!(_i.index(), _index);
        }

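        // Pad the edge list so the unconditional DEP_NODE_SIZE-byte reads in
        // `edge_targets_from` never run past the end of the buffer.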
        edge_list_data.extend(&[0u8; DEP_NODE_PAD]);

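        // Rebuild the per-DepKind hash -> index maps; the per-kind node counts
        // written by `EncoderState::finish` give the exact capacities.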
        let mut index: Vec<_> = (0..(D::DEP_KIND_MAX + 1))
            .map(|_| UnhashMap::with_capacity_and_hasher(d.read_u32() as usize, Default::default()))
            .collect();

        for (idx, node) in nodes.iter_enumerated() {
            index[node.kind.as_usize()].insert(node.hash, idx);
        }

        Arc::new(SerializedDepGraph {
            nodes,
            fingerprints,
            edge_list_indices,
            edge_list_data,
            index,
        })
    }
}

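/// The on-disk header for a single node: a packed 34-byte array holding a
/// 2-byte `head` (the `DepKind`, the edge-index byte width, and an optional
/// inline edge count), the 16-byte `DepNode` hash, and the 16-byte result
/// `Fingerprint`.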
struct SerializedNodeHeader<D> {
    bytes: [u8; 34],
    _marker: PhantomData<D>,
}

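/// The fields of a `SerializedNodeHeader` unpacked into ordinary values.
/// `len` is `None` when the edge count did not fit inline and was written as a
/// separate `usize` after the header.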
struct Unpacked {
    len: Option<usize>,
    bytes_per_index: usize,
    kind: DepKind,
    hash: PackedFingerprint,
    fingerprint: Fingerprint,
}

impl<D: Deps> SerializedNodeHeader<D> {
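    // Layout of the 16-bit `head` field, from least to most significant:
    // KIND_BITS for the DepKind, WIDTH_BITS for `bytes_per_index - 1`, and the
    // remaining LEN_BITS for the inline edge count (0 meaning "stored out of line").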
    const TOTAL_BITS: usize = std::mem::size_of::<DepKind>() * 8;
    const LEN_BITS: usize = Self::TOTAL_BITS - Self::KIND_BITS - Self::WIDTH_BITS;
    const WIDTH_BITS: usize = DEP_NODE_WIDTH_BITS;
    const KIND_BITS: usize = Self::TOTAL_BITS - D::DEP_KIND_MAX.leading_zeros() as usize;
    const MAX_INLINE_LEN: usize = (u16::MAX as usize >> (Self::TOTAL_BITS - Self::LEN_BITS)) - 1;

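    /// Packs `node`, `fingerprint`, and the edge metadata into the 34-byte
    /// header. `edge_max_index` determines how many bytes each edge index
    /// needs; `edge_count` is stored inline only if it fits in LEN_BITS.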
    #[inline]
    fn new(
        node: DepNode,
        fingerprint: Fingerprint,
        edge_max_index: u32,
        edge_count: usize,
    ) -> Self {
        debug_assert_eq!(Self::TOTAL_BITS, Self::LEN_BITS + Self::WIDTH_BITS + Self::KIND_BITS);

        let mut head = node.kind.as_inner();

        let free_bytes = edge_max_index.leading_zeros() as usize / 8;
        let bytes_per_index = (DEP_NODE_SIZE - free_bytes).saturating_sub(1);
        head |= (bytes_per_index as u16) << Self::KIND_BITS;

        if edge_count <= Self::MAX_INLINE_LEN {
            head |= (edge_count as u16 + 1) << (Self::KIND_BITS + Self::WIDTH_BITS);
        }

        let hash: Fingerprint = node.hash.into();

        let mut bytes = [0u8; 34];
        bytes[..2].copy_from_slice(&head.to_le_bytes());
        bytes[2..18].copy_from_slice(&hash.to_le_bytes());
        bytes[18..].copy_from_slice(&fingerprint.to_le_bytes());

        #[cfg(debug_assertions)]
        {
            let res = Self { bytes, _marker: PhantomData };
            assert_eq!(fingerprint, res.fingerprint());
            assert_eq!(node, res.node());
            if let Some(len) = res.len() {
                assert_eq!(edge_count, len);
            }
        }
        Self { bytes, _marker: PhantomData }
    }

    #[inline]
    fn unpack(&self) -> Unpacked {
        let head = u16::from_le_bytes(self.bytes[..2].try_into().unwrap());
        let hash = self.bytes[2..18].try_into().unwrap();
        let fingerprint = self.bytes[18..].try_into().unwrap();

        let kind = head & mask(Self::KIND_BITS) as u16;
        let bytes_per_index = (head >> Self::KIND_BITS) & mask(Self::WIDTH_BITS) as u16;
        let len = (head as usize) >> (Self::WIDTH_BITS + Self::KIND_BITS);

        Unpacked {
            len: len.checked_sub(1),
            bytes_per_index: bytes_per_index as usize + 1,
            kind: DepKind::new(kind),
            hash: Fingerprint::from_le_bytes(hash).into(),
            fingerprint: Fingerprint::from_le_bytes(fingerprint),
        }
    }

    #[inline]
    fn len(&self) -> Option<usize> {
        self.unpack().len
    }

    #[inline]
    fn bytes_per_index(&self) -> usize {
        self.unpack().bytes_per_index
    }

    #[inline]
    fn fingerprint(&self) -> Fingerprint {
        self.unpack().fingerprint
    }

    #[inline]
    fn node(&self) -> DepNode {
        let Unpacked { kind, hash, .. } = self.unpack();
        DepNode { kind, hash }
    }

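    /// Builds the `EdgeHeader` for this node, recording where its edges will
    /// start in `edge_list_data` together with the per-index byte width.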
    #[inline]
    fn edges_header(&self, edge_list_data: &[u8]) -> EdgeHeader {
        EdgeHeader {
            repr: (edge_list_data.len() << DEP_NODE_WIDTH_BITS) | (self.bytes_per_index() - 1),
        }
    }
}

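/// A node of the in-memory graph in the form the encoder works with: the node
/// itself, its result fingerprint, and the indices of its dependencies.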
#[derive(Debug)]
struct NodeInfo {
    node: DepNode,
    fingerprint: Fingerprint,
    edges: EdgesVec,
}

impl NodeInfo {
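    /// Writes this node in the on-disk format: the packed header, an explicit
    /// edge count if it did not fit inline, then each edge index truncated to
    /// `bytes_per_index` little-endian bytes.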
    fn encode<D: Deps>(&self, e: &mut FileEncoder) {
        let NodeInfo { node, fingerprint, ref edges } = *self;
        let header =
            SerializedNodeHeader::<D>::new(node, fingerprint, edges.max_index(), edges.len());
        e.write_array(header.bytes);

        if header.len().is_none() {
            e.emit_usize(edges.len());
        }

        let bytes_per_index = header.bytes_per_index();
        for node_index in edges.iter() {
            e.write_with(|dest| {
                *dest = node_index.as_u32().to_le_bytes();
                bytes_per_index
            });
        }
    }

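    /// Encodes a node promoted from the previous graph: its edges are read
    /// back from `previous` and remapped through `prev_index_to_index` into
    /// indices of the new graph. Returns the edge count.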
    #[inline]
    fn encode_promoted<D: Deps>(
        e: &mut FileEncoder,
        node: DepNode,
        fingerprint: Fingerprint,
        prev_index: SerializedDepNodeIndex,
        prev_index_to_index: &IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>,
        previous: &SerializedDepGraph,
    ) -> usize {
        let edges = previous.edge_targets_from(prev_index);
        let edge_count = edges.size_hint().0;

        let edge_max =
            edges.clone().map(|i| prev_index_to_index[i].unwrap().as_u32()).max().unwrap_or(0);

        let header = SerializedNodeHeader::<D>::new(node, fingerprint, edge_max, edge_count);
        e.write_array(header.bytes);

        if header.len().is_none() {
            e.emit_usize(edge_count);
        }

        let bytes_per_index = header.bytes_per_index();
        for node_index in edges {
            let node_index = prev_index_to_index[node_index].unwrap();
            e.write_with(|dest| {
                *dest = node_index.as_u32().to_le_bytes();
                bytes_per_index
            });
        }

        edge_count
    }
}

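/// Per-`DepKind` node and edge counters, collected only when stats recording
/// is enabled.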
struct Stat {
    kind: DepKind,
    node_counter: u64,
    edge_counter: u64,
}

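/// Mutable state of the dep-graph encoder: the previous graph (needed to
/// re-encode promoted nodes), the output `FileEncoder`, running node and edge
/// totals, optional per-kind statistics, and `kind_stats`, the per-`DepKind`
/// node counts that are written out so the decoder can size its index maps.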
struct EncoderState<D: Deps> {
    previous: Arc<SerializedDepGraph>,
    encoder: FileEncoder,
    total_node_count: usize,
    total_edge_count: usize,
    stats: Option<FxHashMap<DepKind, Stat>>,

    kind_stats: Vec<u32>,
    marker: PhantomData<D>,
}

impl<D: Deps> EncoderState<D> {
    fn new(encoder: FileEncoder, record_stats: bool, previous: Arc<SerializedDepGraph>) -> Self {
        Self {
            previous,
            encoder,
            total_edge_count: 0,
            total_node_count: 0,
            stats: record_stats.then(FxHashMap::default),
            kind_stats: iter::repeat(0).take(D::DEP_KIND_MAX as usize + 1).collect(),
            marker: PhantomData,
        }
    }

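    /// Allocates the next `DepNodeIndex` and updates the node/edge counters.
    /// The graph-recording and stats paths are moved out of line via `outline`;
    /// `edges` is only evaluated when a `DepGraphQuery` is being recorded.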
    #[inline]
    fn record(
        &mut self,
        node: DepNode,
        edge_count: usize,
        edges: impl FnOnce(&mut Self) -> Vec<DepNodeIndex>,
        record_graph: &Option<Lock<DepGraphQuery>>,
    ) -> DepNodeIndex {
        let index = DepNodeIndex::new(self.total_node_count);

        self.total_node_count += 1;
        self.kind_stats[node.kind.as_usize()] += 1;
        self.total_edge_count += edge_count;

        if let Some(record_graph) = &record_graph {
            let edges = edges(self);

            outline(move || {
                if let Some(record_graph) = &mut record_graph.try_lock() {
                    record_graph.push(index, node, &edges);
                }
            });
        }

        if let Some(stats) = &mut self.stats {
            let kind = node.kind;

            outline(move || {
                let stat =
                    stats.entry(kind).or_insert(Stat { kind, node_counter: 0, edge_counter: 0 });
                stat.node_counter += 1;
                stat.edge_counter += edge_count as u64;
            });
        }

        index
    }

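    /// Encodes a newly computed node and registers it in the counters (and in
    /// the `DepGraphQuery`, if one is being recorded).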
    fn encode_node(
        &mut self,
        node: &NodeInfo,
        record_graph: &Option<Lock<DepGraphQuery>>,
    ) -> DepNodeIndex {
        node.encode::<D>(&mut self.encoder);
        self.record(node.node, node.edges.len(), |_| node.edges[..].to_vec(), record_graph)
    }

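    /// Encodes a node promoted from the previous graph, looking up its
    /// `DepNode` and fingerprint by `prev_index` and remapping its edges
    /// through `prev_index_to_index`.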
    #[inline]
    fn encode_promoted_node(
        &mut self,
        prev_index: SerializedDepNodeIndex,
        record_graph: &Option<Lock<DepGraphQuery>>,
        prev_index_to_index: &IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>,
    ) -> DepNodeIndex {
        let node = self.previous.index_to_node(prev_index);

        let fingerprint = self.previous.fingerprint_by_index(prev_index);
        let edge_count = NodeInfo::encode_promoted::<D>(
            &mut self.encoder,
            node,
            fingerprint,
            prev_index,
            prev_index_to_index,
            &self.previous,
        );

        self.record(
            node,
            edge_count,
            |this| {
                this.previous
                    .edge_targets_from(prev_index)
                    .map(|i| prev_index_to_index[i].unwrap())
                    .collect()
            },
            record_graph,
        )
    }

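    /// Writes the trailing metadata (per-kind node counts, then the total node
    /// and edge counts) and flushes the encoder, reporting the artifact size
    /// to the self-profiler on success.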
    fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {
        let Self {
            mut encoder,
            total_node_count,
            total_edge_count,
            stats: _,
            kind_stats,
            marker: _,
            previous: _,
        } = self;

        let node_count = total_node_count.try_into().unwrap();
        let edge_count = total_edge_count.try_into().unwrap();

        for count in kind_stats.iter() {
            count.encode(&mut encoder);
        }

        debug!(?node_count, ?edge_count);
        debug!("position: {:?}", encoder.position());
        IntEncodedWithFixedSize(node_count).encode(&mut encoder);
        IntEncodedWithFixedSize(edge_count).encode(&mut encoder);
        debug!("position: {:?}", encoder.position());
        let result = encoder.finish();
        if let Ok(position) = result {
            profiler.artifact_size("dep_graph", "dep-graph.bin", position as u64);
        }
        result
    }
}

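/// The synchronized front end used while a compilation session runs: it owns
/// the `EncoderState` behind a `Lock`, along with the self-profiler handle and
/// the optional `DepGraphQuery` recording.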
pub(crate) struct GraphEncoder<D: Deps> {
    profiler: SelfProfilerRef,
    status: Lock<Option<EncoderState<D>>>,
    record_graph: Option<Lock<DepGraphQuery>>,
}

impl<D: Deps> GraphEncoder<D> {
    pub(crate) fn new(
        encoder: FileEncoder,
        prev_node_count: usize,
        record_graph: bool,
        record_stats: bool,
        profiler: &SelfProfilerRef,
        previous: Arc<SerializedDepGraph>,
    ) -> Self {
        let record_graph = record_graph.then(|| Lock::new(DepGraphQuery::new(prev_node_count)));
        let status = Lock::new(Some(EncoderState::new(encoder, record_stats, previous)));
        GraphEncoder { status, record_graph, profiler: profiler.clone() }
    }

    pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery)) {
        if let Some(record_graph) = &self.record_graph {
            f(&record_graph.lock())
        }
    }

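    /// Prints the "[incremental] DepGraph Statistics" table (per-kind node
    /// frequencies, counts, and average edge counts) when stats recording is
    /// enabled.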
    pub(crate) fn print_incremental_info(
        &self,
        total_read_count: u64,
        total_duplicate_read_count: u64,
    ) {
        let mut status = self.status.lock();
        let status = status.as_mut().unwrap();
        if let Some(record_stats) = &status.stats {
            let mut stats: Vec<_> = record_stats.values().collect();
            stats.sort_by_key(|s| -(s.node_counter as i64));

            const SEPARATOR: &str = "[incremental] --------------------------------\
                                     ----------------------------------------------\
                                     ------------";

            eprintln!("[incremental]");
            eprintln!("[incremental] DepGraph Statistics");
            eprintln!("{SEPARATOR}");
            eprintln!("[incremental]");
            eprintln!("[incremental] Total Node Count: {}", status.total_node_count);
            eprintln!("[incremental] Total Edge Count: {}", status.total_edge_count);

            if cfg!(debug_assertions) {
                eprintln!("[incremental] Total Edge Reads: {total_read_count}");
                eprintln!("[incremental] Total Duplicate Edge Reads: {total_duplicate_read_count}");
            }

            eprintln!("[incremental]");
            eprintln!(
                "[incremental] {:<36}| {:<17}| {:<12}| {:<17}|",
                "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
            );
            eprintln!("{SEPARATOR}");

            for stat in stats {
                let node_kind_ratio =
                    (100.0 * (stat.node_counter as f64)) / (status.total_node_count as f64);
                let node_kind_avg_edges = (stat.edge_counter as f64) / (stat.node_counter as f64);

                eprintln!(
                    "[incremental] {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
                    format!("{:?}", stat.kind),
                    node_kind_ratio,
                    stat.node_counter,
                    node_kind_avg_edges,
                );
            }

            eprintln!("{SEPARATOR}");
            eprintln!("[incremental]");
        }
    }

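    /// Encodes a newly computed node with its fingerprint and dependency
    /// edges, returning the `DepNodeIndex` assigned to it in the current graph.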
    pub(crate) fn send(
        &self,
        node: DepNode,
        fingerprint: Fingerprint,
        edges: EdgesVec,
    ) -> DepNodeIndex {
        let _prof_timer = self.profiler.generic_activity("incr_comp_encode_dep_graph");
        let node = NodeInfo { node, fingerprint, edges };
        self.status.lock().as_mut().unwrap().encode_node(&node, &self.record_graph)
    }

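    /// Encodes a node carried over ("promoted") from the previous graph, given
    /// its old index and the mapping from old to new indices.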
    #[inline]
    pub(crate) fn send_promoted(
        &self,
        prev_index: SerializedDepNodeIndex,
        prev_index_to_index: &IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>,
    ) -> DepNodeIndex {
        let _prof_timer = self.profiler.generic_activity("incr_comp_encode_dep_graph");
        self.status.lock().as_mut().unwrap().encode_promoted_node(
            prev_index,
            &self.record_graph,
            prev_index_to_index,
        )
    }

    pub(crate) fn finish(&self) -> FileEncodeResult {
        let _prof_timer = self.profiler.generic_activity("incr_comp_encode_dep_graph_finish");

        self.status.lock().take().unwrap().finish(&self.profiler)
    }
}