@@ -86,7 +86,9 @@ EmpiricalData::EmpiricalData(size_t seed, const absl::Span<const Entry> weights,
       total_bytes_allocated_(0),
       birth_sampler_(BirthRateDistribution(weights)),
       total_birth_rate_(0),
-      death_sampler_(weights.size()) {
+      death_sampler_(weights.size()),
+      num_allocated_recorded_(0),
+      bytes_allocated_recorded_(0) {
   // First, compute average live count for each size in a heap of size
   // <total_mem>.
   double total = 0;
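
The two new initializers reference members that must also be declared in the class definition. That header change is not part of this diff, so the following is only a guess at its shape; the names come from the initializer list above, but the size_t type is an assumption.

// Presumed companion declarations in the EmpiricalData class (header not
// shown in this diff; size_t is assumed to match the existing counters).
size_t num_allocated_recorded_;
size_t bytes_allocated_recorded_;
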
@@ -168,9 +170,12 @@ void EmpiricalData::DoDeath(const size_t i) {
 }
 
 void EmpiricalData::RecordBirth(const size_t i) {
+  birth_or_death_.push_back(true);
   birth_or_death_sizes_.push_back(i);
   SizeState& s = state_[i];
   death_sampler_.AdjustWeight(i, s.death_rate);
+  num_allocated_recorded_++;
+  bytes_allocated_recorded_ += s.size;
   // We only care about keeping the number of objects correct when building the
   // trace. When we replay we will actually push the allocated address but
   // when building the trace we can just push nullptr to keep the length of live
@@ -181,14 +186,13 @@ void EmpiricalData::RecordBirth(const size_t i) {
 void* EmpiricalData::ReplayBirth(const size_t i) {
   SizeState& s = state_[i];
   const size_t size = s.size;
-  total_num_allocated_++;
-  total_bytes_allocated_ += size;
   void* p = alloc_(size);
   s.objs.push_back(p);
   return p;
 }
 
 void EmpiricalData::RecordDeath(const size_t i) {
+  birth_or_death_.push_back(false);
   SizeState& s = state_[i];
   CHECK(!s.objs.empty());
   birth_or_death_sizes_.push_back(i);
@@ -215,7 +219,6 @@ void EmpiricalData::RecordNext() {
   const double Both = B + T;
   absl::uniform_real_distribution<double> which(0, Both);
   bool do_birth = which(rng_) < B;
-  birth_or_death_.push_back(do_birth);
 
   if (do_birth) {
     size_t i = birth_sampler_(rng_);
@@ -226,18 +229,25 @@ void EmpiricalData::RecordNext() {
   }
 }
 
-void EmpiricalData::ReplayNext() {
-  bool do_birth = birth_or_death_[birth_or_death_index_];
-  if (do_birth) {
-    void* allocated = ReplayBirth(birth_or_death_sizes_[birth_or_death_index_]);
-    TouchAllocated(allocated);
-  } else {
-    ReplayDeath(birth_or_death_sizes_[birth_or_death_index_],
-                death_objects_[death_object_index_]);
-    __builtin_prefetch(death_object_pointers_[death_object_index_], 1, 3);
-    death_object_index_++;
+void EmpiricalData::ReplayTrace() {
+  for (birth_or_death_index_ = 0, death_object_index_ = 0;
+       birth_or_death_index_ < birth_or_death_.size();
+       ++birth_or_death_index_) {
+    bool do_birth = birth_or_death_[birth_or_death_index_];
+    if (do_birth) {
+      void* allocated =
+          ReplayBirth(birth_or_death_sizes_[birth_or_death_index_]);
+      TouchAllocated(allocated);
+    } else {
+      ReplayDeath(birth_or_death_sizes_[birth_or_death_index_],
+                  death_objects_[death_object_index_]);
+      __builtin_prefetch(death_object_pointers_[death_object_index_], /*rw=*/1,
+                         /*locality=*/3);
+      ++death_object_index_;
+    }
   }
-  birth_or_death_index_++;
+  total_num_allocated_ += num_allocated_recorded_;
+  total_bytes_allocated_ += bytes_allocated_recorded_;
 }
 
 void EmpiricalData::SnapshotLiveObjects() {
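
A note on the argument comments added to the prefetch call above: for GCC/Clang's __builtin_prefetch(addr, rw, locality), rw=1 requests a prefetch in anticipation of a write and locality=3 asks for the line to be kept in as many cache levels as possible. A standalone illustration of the same builtin, not project code:

#include <cstddef>

// Prefetch the next element for writing while processing the current one;
// rw=1 means the data will be written, locality=3 means high temporal
// locality (keep the line cached).
void ScaleAll(double* v, size_t n, double k) {
  for (size_t i = 0; i < n; ++i) {
    if (i + 1 < n) __builtin_prefetch(&v[i + 1], /*rw=*/1, /*locality=*/3);
    v[i] *= k;
  }
}
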
@@ -301,7 +311,7 @@ void EmpiricalData::BuildDeathObjectPointers() {
                          death_object_pointers_.end());
 }
 
-void EmpiricalData::RepairToSnapshotState() {
+void EmpiricalData::RecordRepairToSnapshotState() {
   // Compared to the number of live objects when the snapshot was taken, each
   // size state either
   // 1) Contains the same number of live objects as when the snapshot was taken,
@@ -312,29 +322,14 @@ void EmpiricalData::RepairToSnapshotState() {
   // number of true deallocations.
   for (int i = 0; i < state_.size(); i++) {
     while (state_[i].objs.size() < snapshot_state_[i].objs.size()) {
-      DoBirth(i);
+      RecordBirth(i);
     }
     while (state_[i].objs.size() > snapshot_state_[i].objs.size()) {
-      DoDeath(i);
+      RecordDeath(i);
     }
   }
 }
 
-void EmpiricalData::RestartTraceIfNecessary() {
-  if (birth_or_death_index_ == birth_or_death_.size()) {
-    // As the snapshotted lists of live objects will contain addresses which
-    // have already been freed we can't just call RestoreSnapshot(). Instead
-    // let's do the necessary allocations / deallocations to end up with the
-    // identical number of live objects we had when initially building the
-    // trace.
-    RepairToSnapshotState();
-    // After the above call we can safely run through the recorded trace
-    // again.
-    birth_or_death_index_ = 0;
-    death_object_index_ = 0;
-  }
-}
-
 std::vector<EmpiricalData::Entry> GetEmpiricalDataEntries(
     absl::string_view file) {
   std::vector<EmpiricalData::Entry> distribution;
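
Taken together, the commit makes the recorded trace a closed loop: RecordRepairToSnapshotState() appends the births and deaths needed to return every size class to its snapshot count, so ReplayTrace() can run back-to-back without the deleted RestartTraceIfNecessary() check. A rough sketch of the assumed driver follows; only the EmpiricalData method names come from this diff, while the include path, function, and stop condition are hypothetical:

#include <cstddef>

#include "empirical.h"  // assumed header name for EmpiricalData

bool ShouldKeepRunning();  // hypothetical stop condition for this sketch

// Illustrative driver, not part of this commit: record once, repair the
// trace back to the snapshot state, then replay it as often as needed.
void DriveEmpiricalBenchmark(EmpiricalData& data, size_t num_recorded_ops) {
  data.SnapshotLiveObjects();  // remember per-size live-object counts
  for (size_t i = 0; i < num_recorded_ops; ++i) {
    data.RecordNext();
  }
  // Append repair births/deaths so the trace ends where it began.
  data.RecordRepairToSnapshotState();
  data.BuildDeathObjectPointers();
  while (ShouldKeepRunning()) {
    data.ReplayTrace();  // each pass restores the snapshot's live counts
  }
}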