@@ -836,10 +836,9 @@ bool WallProfiler::waitForSignal(uint64_t targetCallCount) {
       0, std::chrono::nanoseconds(samplingPeriod_ * maxRetries).count()};
   nanosleep(&ts, nullptr);
 #endif
-  auto res =
-      noCollectCallCount_.load(std::memory_order_relaxed) >= targetCallCount;
-  std::atomic_signal_fence(std::memory_order_release);
-  return res;
+  auto res = noCollectCallCount_.load(std::memory_order_relaxed);
+  std::atomic_signal_fence(std::memory_order_acquire);
+  return res >= targetCallCount;
 }
 
 Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
@@ -850,14 +849,14 @@ Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
   uint64_t callCount = 0;
   auto oldProfileId = profileId_;
   if (restart && workaroundV8Bug_) {
+    std::atomic_signal_fence(std::memory_order_release);
     collectionMode_.store(CollectionMode::kNoCollect,
                           std::memory_order_relaxed);
-    std::atomic_signal_fence(std::memory_order_release);
     waitForSignal();
   } else if (withContexts_) {
+    std::atomic_signal_fence(std::memory_order_release);
     collectionMode_.store(CollectionMode::kNoCollect,
                           std::memory_order_relaxed);
-    std::atomic_signal_fence(std::memory_order_release);
 
     // make sure timestamp changes to avoid having samples from previous profile
     auto now = Now();
@@ -899,9 +898,9 @@ Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
     auto now = Now();
     while (Now() == now) {
     }
+    std::atomic_signal_fence(std::memory_order_release);
     collectionMode_.store(CollectionMode::kCollectContexts,
                           std::memory_order_relaxed);
-    std::atomic_signal_fence(std::memory_order_release);
   }
 
   if (withContexts_) {
@@ -936,10 +935,10 @@ Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
     Dispose(v8::Isolate::GetCurrent(), true);
   } else if (workaroundV8Bug_) {
     waitForSignal(callCount + 1);
+    std::atomic_signal_fence(std::memory_order_release);
     collectionMode_.store(withContexts_ ? CollectionMode::kCollectContexts
                                         : CollectionMode::kPassThrough,
                           std::memory_order_relaxed);
-    std::atomic_signal_fence(std::memory_order_release);
   }
 
   started_ = restart;
@@ -1089,18 +1088,12 @@ void WallProfiler::OnGCStart(v8::Isolate* isolate) {
   if (curCount == 0) {
     gcAsyncId = GetAsyncIdNoGC(isolate);
   }
-  gcCount.store(curCount + 1, std::memory_order_relaxed);
   std::atomic_signal_fence(std::memory_order_release);
+  gcCount.store(curCount + 1, std::memory_order_relaxed);
 }
 
 void WallProfiler::OnGCEnd() {
-  auto newCount = gcCount.load(std::memory_order_relaxed) - 1;
-  std::atomic_signal_fence(std::memory_order_acquire);
-  gcCount.store(newCount, std::memory_order_relaxed);
-  std::atomic_signal_fence(std::memory_order_release);
-  if (newCount == 0) {
-    gcAsyncId = -1;
-  }
+  gcCount.fetch_sub(1, std::memory_order_relaxed);
 }
 
 void WallProfiler::PushContext(int64_t time_from,
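The recurring change across these hunks is where std::atomic_signal_fence sits relative to the relaxed atomic accesses: the release fence now comes before the relaxed store that publishes a state change (collectionMode_, gcCount), and the reader does a relaxed load followed by an acquire fence before acting on the loaded value (waitForSignal). The sketch below illustrates that publish/consume pattern between a thread and a signal handler running on the same thread, which is the case a signal fence is defined for; the names payload, published, and on_signal are illustrative only and do not come from the patch.

```cpp
#include <atomic>
#include <csignal>
#include <cstdio>

static int payload = 0;                          // illustrative non-atomic state
static std::atomic<int> published{0};            // illustrative flag, think collectionMode_
static volatile std::sig_atomic_t observed = 0;  // result reported back to main()

// Consume side, as in waitForSignal(): relaxed load first, acquire fence
// after it, then act on the loaded value.
static void on_signal(int) {
  int flag = published.load(std::memory_order_relaxed);
  std::atomic_signal_fence(std::memory_order_acquire);
  if (flag != 0) {
    observed = payload;  // safe: the publisher's write is ordered before the flag store
  }
}

int main() {
  std::signal(SIGUSR1, on_signal);  // POSIX signal, like the profiler's sampling signal

  // Publish side, as in StopImpl()/OnGCStart(): write state, release fence,
  // then a relaxed store of the flag the handler will observe.
  payload = 42;
  std::atomic_signal_fence(std::memory_order_release);
  published.store(1, std::memory_order_relaxed);

  std::raise(SIGUSR1);  // handler runs synchronously on this thread
  std::printf("observed=%d\n", static_cast<int>(observed));
  return 0;
}
```

Because the handler and the interrupted code run on the same thread, std::atomic_signal_fence only has to prevent compiler reordering rather than emit a hardware barrier, which is why relaxed loads and stores are sufficient on both sides.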