@@ -25,12 +25,9 @@ struct StackDepotNode {
2525 using hash_type = u64 ;
2626 hash_type stack_hash;
2727 u32 link;
28- atomic_uint32_t tag_and_use_count; // tag : 12 high bits; use_count : 20 ;
28+ u32 tag;
2929
3030 static const u32 kTabSizeLog = SANITIZER_ANDROID ? 16 : 20 ;
31- static const u32 kUseCountBits = 20 ;
32- static const u32 kMaxUseCount = 1 << kUseCountBits ;
33- static const u32 kUseCountMask = (1 << kUseCountBits ) - 1 ;
3431
3532 typedef StackTrace args_type;
3633 bool eq (hash_type hash, const args_type &args) const {
@@ -53,19 +50,6 @@ struct StackDepotNode {
5350 typedef StackDepotHandle handle_type;
5451};
5552
56- COMPILER_CHECK (StackDepotNode::kMaxUseCount >= (u32 )kStackDepotMaxUseCount );
57-
58- int StackDepotHandle::use_count () const {
59- return atomic_load (&node_->tag_and_use_count , memory_order_relaxed) &
60- StackDepotNode::kUseCountMask ;
61- }
62- void StackDepotHandle::inc_use_count_unsafe () {
63- u32 prev =
64- atomic_fetch_add (&node_->tag_and_use_count , 1 , memory_order_relaxed) &
65- StackDepotNode::kUseCountMask ;
66- CHECK_LT (prev + 1 , StackDepotNode::kMaxUseCount );
67- }
68-
6953// FIXME(dvyukov): this single reserved bit is used in TSan.
7054typedef StackDepotBase<StackDepotNode, 1 , StackDepotNode::kTabSizeLog >
7155 StackDepot;
@@ -74,15 +58,27 @@ static StackDepot theDepot;
7458// caching efficiency.
7559static TwoLevelMap<uptr *, StackDepot::kNodesSize1 , StackDepot::kNodesSize2 >
7660 tracePtrs;
61+ // Keep mutable data out of frequently accessed nodes to improve caching
62+ // efficiency.
63+ static TwoLevelMap<atomic_uint32_t , StackDepot::kNodesSize1 ,
64+ StackDepot::kNodesSize2 >
65+ useCounts;
66+
67+ int StackDepotHandle::use_count () const {
68+ return atomic_load_relaxed (&useCounts[id_]);
69+ }
70+
71+ void StackDepotHandle::inc_use_count_unsafe () {
72+ atomic_fetch_add (&useCounts[id_], 1 , memory_order_relaxed);
73+ }
7774
7875uptr StackDepotNode::allocated () {
79- return traceAllocator.allocated () + tracePtrs.MemoryUsage ();
76+ return traceAllocator.allocated () + tracePtrs.MemoryUsage () +
77+ useCounts.MemoryUsage ();
8078}
8179
8280void StackDepotNode::store (u32 id, const args_type &args, hash_type hash) {
83- CHECK_EQ (args.tag & (~kUseCountMask >> kUseCountBits ), args.tag );
84- atomic_store (&tag_and_use_count, args.tag << kUseCountBits ,
85- memory_order_relaxed);
81+ tag = args.tag ;
8682 stack_hash = hash;
8783 uptr *stack_trace = traceAllocator.alloc (args.size + 1 );
8884 *stack_trace = args.size ;
@@ -94,8 +90,6 @@ StackDepotNode::args_type StackDepotNode::load(u32 id) const {
9490 const uptr *stack_trace = tracePtrs[id];
9591 if (!stack_trace)
9692 return {};
97- u32 tag =
98- atomic_load (&tag_and_use_count, memory_order_relaxed) >> kUseCountBits ;
9993 return args_type (stack_trace + 1 , *stack_trace, tag);
10094}
10195
0 commit comments