
deps: V8: cherry-pick 597f885 #29367

Closed (wants to merge 1 commit)
common.gypi (2 changes: 1 addition & 1 deletion)
@@ -39,7 +39,7 @@

    # Reset this number to 0 on major V8 upgrades.
    # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.11',
+    'v8_embedder_string': '-node.12',

    ##### V8 defaults for Node.js #####
deps/v8/src/debug/debug-coverage.cc (124 changes: 80 additions & 44 deletions)
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
return start;
}

-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
DCHECK_NE(kNoSourcePosition, a.start);
DCHECK_NE(kNoSourcePosition, b.start);
@@ -482,32 +475,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
// Reset all counters on the DebugInfo to zero.
ResetAllBlockCounts(info);
}
-} // anonymous namespace

-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}

-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}

-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;

+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
  const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;

switch (isolate->code_coverage_mode()) {
case v8::debug::CoverageMode::kBlockBinary:
@@ -526,15 +499,15 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(shared.IsSubjectToDebugging());
uint32_t count = static_cast<uint32_t>(vector.invocation_count());
if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
}
break;
}
case v8::debug::CoverageMode::kBestEffort: {
DCHECK(!isolate->factory()
->feedback_vectors_for_profiling_tools()
->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
HeapObjectIterator heap_iterator(isolate->heap());
for (HeapObject current_obj = heap_iterator.Next();
!current_obj.is_null(); current_obj = heap_iterator.Next()) {
@@ -543,8 +516,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
SharedFunctionInfo shared = func.shared();
if (!shared.IsSubjectToDebugging()) continue;
if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
          continue;
+        }
uint32_t count = 0;
if (func.has_feedback_vector()) {
count =
@@ -555,7 +529,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
// atleast once. We don't have precise invocation count here.
count = 1;
}
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
}

// Also check functions on the stack to collect the count map. With lazy
@@ -564,12 +538,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
// updated (i.e. it didn't execute return / jump).
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
}
break;
}
}
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+} // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

// Iterate shared function infos of every script and build a mapping
// between source ranges and invocation counts.
@@ -584,30 +610,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
result->emplace_back(script_handle);
std::vector<CoverageFunction>* functions = &result->back().functions;

-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;

{
// Sort functions by start position, from outer to inner functions.
SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
for (SharedFunctionInfo info = infos.Next(); !info.is_null();
info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
}
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
Review thread on this line:

Member: Is perhaps part of the problem that this should be std::stable_sort(sorted.begin(), sorted.end());?

Contributor (author): I think there's a chance that the sorting behavior wouldn't flip-flop if we used stable_sort. But I'm not sure we know that the "counted" function element is always added first; I like that we're more explicit and bring functions with coverage to the top of the sorting order.

Reply: Unfortunately stable_sort wouldn't help. The order of shared function infos on the heap is nondeterministic, and likewise the initial (unsorted) order in sorted. A stable sort would just preserve that initial order for "same" elements.

The fix in this change is to also consider the SFI's call count while sorting. As described in the comment, it's somewhat of a hack, but works. The cleaner solution, potentially in the future, would be to remove the optimization to omit "irrelevant" coverage.

(A standalone sketch of this tie-breaking comparator appears after the diff below.)
}

// Stack to track nested functions, referring function by index.
std::vector<size_t> nesting;

// Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;
+
// Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
nesting.pop_back();
}

if (count != 0) {
switch (collectionMode) {
case v8::debug::CoverageMode::kBlockCount:
case v8::debug::CoverageMode::kBlockCount:
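For context on the review thread above, here is a minimal standalone sketch (plain C++, not V8 code; the struct, names, and values are invented for illustration) of the tie-breaking comparator this patch introduces. It shows that sorting by start ascending, end descending, and count ascending yields the same order regardless of the initial, nondeterministic order of the inputs, which is the property a stable sort alone could not provide.

// Illustrative only: mirrors the comparator added in this patch on a toy
// struct, to show that ties on identical source ranges are broken by call
// count rather than by the (nondeterministic) initial order.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

struct FunctionRange {
  const char* name;  // hypothetical label, for printing only
  int start;
  int end;
  uint32_t count;

  // Same ordering as SharedFunctionInfoAndCount::operator< in the patch:
  // start ascending, end descending, count ascending.
  bool operator<(const FunctionRange& that) const {
    if (start != that.start) return start < that.start;
    if (end != that.end) return end > that.end;
    return count < that.count;
  }
};

int main() {
  // Two functions with an identical source range (an uncovered and a covered
  // variant of the "same" function), plus an enclosing one.
  std::vector<FunctionRange> a = {{"covered", 10, 50, 3},
                                  {"outer", 0, 100, 1},
                                  {"uncovered", 10, 50, 0}};
  // The same elements in a different initial order, simulating the
  // nondeterministic order in which SFIs are found on the heap.
  std::vector<FunctionRange> b = {a[2], a[0], a[1]};

  std::sort(a.begin(), a.end());
  std::sort(b.begin(), b.end());

  // Both runs produce the same order: outer, uncovered, covered.
  for (size_t i = 0; i < a.size(); ++i) {
    std::cout << a[i].name << " == " << b[i].name << "\n";
  }
  return 0;
}

Compiled as an ordinary C++ program, both vectors print the same order (outer, uncovered, covered), illustrating why the count is included in the sort key.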
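Similarly, here is a small self-contained sketch (again illustrative, not V8 code) of the stack-based nesting reconstruction performed by the loop above. It uses the same pop condition, end <= start, on ranges pre-sorted by start ascending and end descending. Note one simplification: in the actual patch the stack indexes the emitted functions vector, while this sketch indexes the sorted input directly.

// Illustrative only: reconstructs function nesting from source ranges that
// have been sorted by start ascending, end descending.
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

struct Range {
  std::string name;  // hypothetical label
  int start;
  int end;
};

int main() {
  // Already sorted: outer functions come before the functions they enclose.
  std::vector<Range> sorted = {
      {"top-level", 0, 100}, {"foo", 10, 40}, {"bar", 15, 30}, {"baz", 60, 90}};

  std::vector<size_t> nesting;  // stack of indices into `sorted`
  for (size_t i = 0; i < sorted.size(); ++i) {
    const Range& r = sorted[i];
    // Pop entries that end at or before the current start: they cannot be
    // this function's parent. Same condition as in the patch.
    while (!nesting.empty() && sorted[nesting.back()].end <= r.start) {
      nesting.pop_back();
    }
    if (nesting.empty()) {
      std::cout << r.name << " has no enclosing function\n";
    } else {
      std::cout << r.name << " is nested in " << sorted[nesting.back()].name
                << "\n";
    }
    nesting.push_back(i);
  }
  return 0;
}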