Bug 1866629 - Remove the sample_groups object in serialized counters, r=canaltinova,profiler-reviewers.

Differential Revision: https://phabricator.services.mozilla.com/D194695
Florian Quèze 2023-12-12 11:28:07 +00:00
parent 9503eb0aca
commit 03bd9de29c
7 changed files with 245 additions and 328 deletions
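The net effect on the serialized profile: each counter's samples were previously wrapped in a "sample_groups" array whose single element carried an always-zero "id"; the samples object now hangs directly off the counter. A condensed before/after sketch, assembled from the format comments updated below (values are illustrative):

Before (format version 28):

  "counters": [{
    "name": "malloc",
    "category": "Memory",
    "description": "Amount of allocated memory",
    "sample_groups": [{
      "id": 0,
      "samples": {"schema": {"time": 0, "number": 1, "count": 2}, "data": [...]}
    }]
  }]

After (format version 29):

  "counters": [{
    "name": "malloc",
    "category": "Memory",
    "description": "Amount of allocated memory",
    "samples": {"schema": {"time": 0, "count": 1, "number": 2}, "data": [...]}
  }]

Note that the schema also changes: "count" now comes before "number" in the column order.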

View file

@@ -948,15 +948,13 @@ void ProfileBuffer::StreamProfilerOverheadToJSON(
   });
 }
 
-struct CounterKeyedSample {
+struct CounterSample {
   double mTime;
   uint64_t mNumber;
   int64_t mCount;
 };
 
-using CounterKeyedSamples = Vector<CounterKeyedSample>;
+using CounterSamples = Vector<CounterSample>;
 
-using CounterMap = HashMap<uint64_t, CounterKeyedSamples>;
-
 // HashMap lookup, if not found, a default value is inserted.
 // Returns reference to (existing or new) value inside the HashMap.
@@ -993,17 +991,15 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
   // Valid sequence in the buffer:
   // CounterID
   // Time
-  // ( CounterKey Count Number? )*
+  // ( Count Number? )*
   //
   // And the JSON (example):
   //  "counters": {
   //   "name": "malloc",
   //   "category": "Memory",
   //   "description": "Amount of allocated memory",
-  //   "sample_groups": {
-  //    "id": 0,
   //    "samples": {
-  //     "schema": {"time": 0, "number": 1, "count": 2},
+  //     "schema": {"time": 0, "count": 1, "number": 2},
   //     "data": [
   //      [
   //       16117.033968000002,
@@ -1016,18 +1012,17 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
   //       6801320
   //      ],
   //     ],
-  //    }
-  //   }
   //  },
+  //  }
 
   // Build the map of counters and populate it
-  HashMap<void*, CounterMap> counters;
+  HashMap<void*, CounterSamples> counters;
 
   while (e.Has()) {
     // skip all non-Counters, including if we start in the middle of a counter
     if (e.Get().IsCounterId()) {
       void* id = e.Get().GetPtr();
-      CounterMap& counter = LookupOrAdd(counters, id);
+      CounterSamples& data = LookupOrAdd(counters, id);
       e.Next();
       if (!e.Has() || !e.Get().IsTime()) {
         ERROR_AND_CONTINUE("expected a Time entry");
@@ -1035,10 +1030,6 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
       double time = e.Get().GetDouble();
       e.Next();
       if (time >= aSinceTime) {
-        while (e.Has() && e.Get().IsCounterKey()) {
-          uint64_t key = e.Get().GetUint64();
-          CounterKeyedSamples& data = LookupOrAdd(counter, key);
-          e.Next();
         if (!e.Has() || !e.Get().IsCount()) {
           ERROR_AND_CONTINUE("expected a Count entry");
         }
@@ -1051,9 +1042,8 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
           number = e.Get().GetInt64();
          e.Next();
        }
-        CounterKeyedSample sample = {time, number, count};
+        CounterSample sample = {time, number, count};
        MOZ_RELEASE_ASSERT(data.append(sample));
-      }
     } else {
       // skip counter sample - only need to skip the initial counter
       // id, then let the loop at the top skip the rest
@@ -1062,14 +1052,18 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
       e.Next();
     }
   }
 
-  // we have a map of a map of counter entries; dump them to JSON
+  // we have a map of counter entries; dump them to JSON
   if (counters.count() == 0) {
     return;
   }
 
   aWriter.StartArrayProperty("counters");
   for (auto iter = counters.iter(); !iter.done(); iter.next()) {
-    CounterMap& counter = iter.get().value();
+    CounterSamples& samples = iter.get().value();
+    size_t size = samples.length();
+    if (size == 0) {
+      continue;
+    }
     const BaseProfilerCount* base_counter =
         static_cast<const BaseProfilerCount*>(iter.get().key());
@@ -1080,17 +1074,6 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
     aWriter.StringProperty("description",
                            MakeStringSpan(base_counter->mDescription));
 
-    aWriter.StartArrayProperty("sample_groups");
-    for (auto counter_iter = counter.iter(); !counter_iter.done();
-         counter_iter.next()) {
-      CounterKeyedSamples& samples = counter_iter.get().value();
-      uint64_t key = counter_iter.get().key();
-
-      size_t size = samples.length();
-      if (size == 0) {
-        continue;
-      }
-
     bool hasNumber = false;
     for (size_t i = 0; i < size; i++) {
       if (samples[i].mNumber != 0) {
@@ -1099,9 +1082,6 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
       }
     }
 
-    aWriter.StartObjectElement();
-    {
-      aWriter.IntProperty("id", static_cast<int64_t>(key));
     aWriter.StartObjectProperty("samples");
     {
       JSONSchemaWriter schema(aWriter);
@@ -1129,9 +1109,8 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
          writer.TimeMsElement(TIME, samples[i].mTime);
          writer.IntElement(COUNT, samples[i].mCount - previousCount);
          if (hasNumber) {
-            writer.IntElement(
-                NUMBER,
-                static_cast<int64_t>(samples[i].mNumber - previousNumber));
+            writer.IntElement(NUMBER, static_cast<int64_t>(samples[i].mNumber -
+                                                           previousNumber));
          }
          previousNumber = samples[i].mNumber;
          previousCount = samples[i].mCount;
@@ -1139,10 +1118,6 @@ void ProfileBuffer::StreamCountersToJSON(SpliceableJSONWriter& aWriter,
       }
       aWriter.EndArray();   // data
       aWriter.EndObject();  // samples
-    }
-    aWriter.EndObject();  // sample_groups item
-  }
-  aWriter.EndArray();  // sample groups
     aWriter.End();  // for each counter
   }
   aWriter.EndArray();  // counters
@@ -1261,7 +1236,6 @@ bool ProfileBuffer::DuplicateLastSample(BaseProfilerThreadId aThreadId,
           ProfileBufferEntry::Time(
               (TimeStamp::Now() - aProcessStartTime).ToMilliseconds()));
       break;
-    case ProfileBufferEntry::Kind::CounterKey:
     case ProfileBufferEntry::Kind::Number:
     case ProfileBufferEntry::Kind::Count:
     case ProfileBufferEntry::Kind::Responsiveness:
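With the sample_groups layer gone, the writer nesting in StreamCountersToJSON collapses by two levels. A condensed sketch of the remaining call structure, pieced together from the hunks above (elisions marked in comments; not a compilable excerpt, and each counter's enclosing object is opened before these properties, outside the hunks shown):

  aWriter.StartArrayProperty("counters");
  for (auto iter = counters.iter(); !iter.done(); iter.next()) {
    CounterSamples& samples = iter.get().value();
    // ... skip counters with no samples; write "name", "category",
    // "description" ...
    aWriter.StartObjectProperty("samples");
    // ... write the "schema" object, then the delta-encoded "data" array ...
    aWriter.EndObject();  // samples
    aWriter.End();        // for each counter
  }
  aWriter.EndArray();  // counters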

View file

@@ -2348,14 +2348,9 @@ void SamplerThread::Run() {
           // create Buffer entries for each counter
           buffer.AddEntry(ProfileBufferEntry::CounterId(counter));
           buffer.AddEntry(ProfileBufferEntry::Time(delta.ToMilliseconds()));
-          // XXX support keyed maps of counts
-          // In the future, we'll support keyed counters - for example, counters
-          // with a key which is a thread ID. For "simple" counters we'll just
-          // use a key of 0.
           int64_t count;
           uint64_t number;
           counter->Sample(count, number);
-          buffer.AddEntry(ProfileBufferEntry::CounterKey(0));
           buffer.AddEntry(ProfileBufferEntry::Count(count));
           if (number) {
             buffer.AddEntry(ProfileBufferEntry::Number(number));
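Each sampling tick now appends a flat entry sequence per counter, matching the parser's "CounterID Time ( Count Number? )*" expectation above; the Number entry is written only when non-zero. Schematically, for one tick:

  CounterId(counter) -> Time(delta) -> Count(count) [-> Number(number)]

Format version 28 interposed a CounterKey(0) entry before each Count; both writers drop it here, and the entry kind itself is removed from the ProfileBufferEntry list below.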

View file

@@ -24,7 +24,7 @@ namespace mozilla {
 class ProfileBufferChunkManagerWithLocalLimit;
 
 // Centrally defines the version of the gecko profiler JSON format.
-const int GECKO_PROFILER_FORMAT_VERSION = 28;
+const int GECKO_PROFILER_FORMAT_VERSION = 29;
 
 namespace baseprofiler::detail {

View file

@@ -43,7 +43,6 @@ static constexpr size_t ProfileBufferEntryNumChars = 8;
   MACRO(TimeBeforeCompactStack, double, sizeof(double))  \
   MACRO(TimeBeforeSameSample, double, sizeof(double))    \
   MACRO(CounterId, void*, sizeof(void*))                 \
-  MACRO(CounterKey, uint64_t, sizeof(uint64_t))          \
   MACRO(Number, uint64_t, sizeof(uint64_t))              \
   MACRO(Count, int64_t, sizeof(int64_t))                 \
   MACRO(ProfilerOverheadTime, double, sizeof(double))    \

View file

@@ -1774,18 +1774,16 @@ void ProfileBuffer::StreamProfilerOverheadToJSON(
   });
 }
 
-struct CounterKeyedSample {
+struct CounterSample {
   double mTime;
   uint64_t mNumber;
   int64_t mCount;
 };
 
-using CounterKeyedSamples = Vector<CounterKeyedSample>;
+using CounterSamples = Vector<CounterSample>;
 
 static LazyLogModule sFuzzyfoxLog("Fuzzyfox");
 
-using CounterMap = HashMap<uint64_t, CounterKeyedSamples>;
-
 // HashMap lookup, if not found, a default value is inserted.
 // Returns reference to (existing or new) value inside the HashMap.
 template <typename HashM, typename Key>
@@ -1822,17 +1820,15 @@ void ProfileBuffer::StreamCountersToJSON(
   // Valid sequence in the buffer:
   // CounterID
   // Time
-  // ( CounterKey Count Number? )*
+  // ( Count Number? )*
   //
   // And the JSON (example):
   //  "counters": {
   //   "name": "malloc",
   //   "category": "Memory",
   //   "description": "Amount of allocated memory",
-  //   "sample_groups": {
-  //    "id": 0,
   //    "samples": {
-  //     "schema": {"time": 0, "number": 1, "count": 2},
+  //     "schema": {"time": 0, "count": 1, "number": 2},
   //     "data": [
   //      [
   //       16117.033968000002,
@@ -1845,18 +1841,17 @@ void ProfileBuffer::StreamCountersToJSON(
   //       6801320
   //      ],
   //     ],
-  //    }
-  //   }
   //  },
+  //  }
 
   // Build the map of counters and populate it
-  HashMap<void*, CounterMap> counters;
+  HashMap<void*, CounterSamples> counters;
 
   while (e.Has()) {
     // skip all non-Counters, including if we start in the middle of a counter
     if (e.Get().IsCounterId()) {
       void* id = e.Get().GetPtr();
-      CounterMap& counter = LookupOrAdd(counters, id);
+      CounterSamples& data = LookupOrAdd(counters, id);
       e.Next();
       if (!e.Has() || !e.Get().IsTime()) {
         ERROR_AND_CONTINUE("expected a Time entry");
@@ -1864,10 +1859,6 @@ void ProfileBuffer::StreamCountersToJSON(
       double time = e.Get().GetDouble();
       e.Next();
       if (time >= aSinceTime) {
-        while (e.Has() && e.Get().IsCounterKey()) {
-          uint64_t key = e.Get().GetUint64();
-          CounterKeyedSamples& data = LookupOrAdd(counter, key);
-          e.Next();
         if (!e.Has() || !e.Get().IsCount()) {
           ERROR_AND_CONTINUE("expected a Count entry");
         }
@@ -1880,9 +1871,8 @@ void ProfileBuffer::StreamCountersToJSON(
           number = e.Get().GetInt64();
          e.Next();
        }
-        CounterKeyedSample sample = {time, number, count};
+        CounterSample sample = {time, number, count};
        MOZ_RELEASE_ASSERT(data.append(sample));
-      }
     } else {
       // skip counter sample - only need to skip the initial counter
       // id, then let the loop at the top skip the rest
@@ -1891,14 +1881,18 @@ void ProfileBuffer::StreamCountersToJSON(
       e.Next();
     }
   }
 
-  // we have a map of a map of counter entries; dump them to JSON
+  // we have a map of counter entries; dump them to JSON
   if (counters.count() == 0) {
     return;
   }
 
   aWriter.StartArrayProperty("counters");
   for (auto iter = counters.iter(); !iter.done(); iter.next()) {
-    CounterMap& counter = iter.get().value();
+    CounterSamples& samples = iter.get().value();
+    size_t size = samples.length();
+    if (size == 0) {
+      continue;
+    }
     const BaseProfilerCount* base_counter =
         static_cast<const BaseProfilerCount*>(iter.get().key());
@@ -1909,17 +1903,6 @@ void ProfileBuffer::StreamCountersToJSON(
     aWriter.StringProperty("description",
                            MakeStringSpan(base_counter->mDescription));
 
-    aWriter.StartArrayProperty("sample_groups");
-    for (auto counter_iter = counter.iter(); !counter_iter.done();
-         counter_iter.next()) {
-      CounterKeyedSamples& samples = counter_iter.get().value();
-      uint64_t key = counter_iter.get().key();
-
-      size_t size = samples.length();
-      if (size == 0) {
-        continue;
-      }
-
     bool hasNumber = false;
     for (size_t i = 0; i < size; i++) {
       if (samples[i].mNumber != 0) {
@@ -1927,10 +1910,6 @@ void ProfileBuffer::StreamCountersToJSON(
         break;
       }
     }
 
-    aWriter.StartObjectElement();
-    {
-      aWriter.IntProperty("id", static_cast<int64_t>(key));
     aWriter.StartObjectProperty("samples");
     {
       JSONSchemaWriter schema(aWriter);
@@ -1948,16 +1927,15 @@ void ProfileBuffer::StreamCountersToJSON(
       for (size_t i = 0; i < size; i++) {
         // Encode as deltas, and only encode if different than the previous
         // or next sample; Always write the first and last samples.
-        if (i == 0 || i == size - 1 ||
-            samples[i].mNumber != previousNumber ||
+        if (i == 0 || i == size - 1 || samples[i].mNumber != previousNumber ||
            samples[i].mCount != previousCount ||
            // Ensure we ouput the first 0 before skipping samples.
            (i >= 2 && (samples[i - 2].mNumber != previousNumber ||
                        samples[i - 2].mCount != previousCount))) {
          if (i != 0 && samples[i].mTime >= samples[i - 1].mTime) {
            MOZ_LOG(sFuzzyfoxLog, mozilla::LogLevel::Error,
-                    ("Fuzzyfox Profiler Assertion: %f >= %f",
-                     samples[i].mTime, samples[i - 1].mTime));
+                    ("Fuzzyfox Profiler Assertion: %f >= %f", samples[i].mTime,
+                     samples[i - 1].mTime));
          }
          MOZ_ASSERT(i == 0 || samples[i].mTime >= samples[i - 1].mTime);
          MOZ_ASSERT(samples[i].mNumber >= previousNumber);
@@ -2001,10 +1979,6 @@ void ProfileBuffer::StreamCountersToJSON(
       }
       aWriter.EndArray();   // data
       aWriter.EndObject();  // samples
-    }
-    aWriter.EndObject();  // sample_groups item
-  }
-  aWriter.EndArray();  // sample groups
     aWriter.End();  // for each counter
   }
   aWriter.EndArray();  // counters
@@ -2228,7 +2202,6 @@ bool ProfileBuffer::DuplicateLastSample(ProfilerThreadId aThreadId,
         // We're done.
         return true;
       }
-      case ProfileBufferEntry::Kind::CounterKey:
       case ProfileBufferEntry::Kind::Number:
       case ProfileBufferEntry::Kind::Count:
         // Don't copy anything not part of a thread's stack sample
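To make the sample-skipping rule above concrete, here is a small standalone sketch (toy types and made-up data, not profiler code) that applies the same condition: a sample is written if it is first or last, differs from the previously written sample, or immediately follows the end of a flat run (the "output the first 0 before skipping" clause):

  // Standalone sketch of the delta-encoding skip rule from
  // ProfileBuffer::StreamCountersToJSON, on a toy data set.
  #include <cstdint>
  #include <cstdio>
  #include <vector>

  struct Sample {
    double mTime;
    uint64_t mNumber;
    int64_t mCount;
  };

  int main() {
    // Counts 10, 12, 12, 12, 14 sampled at times 0..4; numbers stay zero.
    std::vector<Sample> samples{
        {0.0, 0, 10}, {1.0, 0, 12}, {2.0, 0, 12}, {3.0, 0, 12}, {4.0, 0, 14}};
    uint64_t previousNumber = 0;
    int64_t previousCount = 0;
    const size_t size = samples.size();
    for (size_t i = 0; i < size; i++) {
      // Same condition as the patched loop: always write the first and last
      // samples; otherwise write only on change, or right after a run ends.
      if (i == 0 || i == size - 1 || samples[i].mNumber != previousNumber ||
          samples[i].mCount != previousCount ||
          (i >= 2 && (samples[i - 2].mNumber != previousNumber ||
                      samples[i - 2].mCount != previousCount))) {
        std::printf("[%g, %lld]\n", samples[i].mTime,
                    static_cast<long long>(samples[i].mCount - previousCount));
        previousNumber = samples[i].mNumber;
        previousCount = samples[i].mCount;
      }
    }
    // Prints [0, 10] [1, 2] [2, 0] [4, 2]: the zero delta at time 2 records
    // where the value went flat, and the identical sample at time 3 is skipped.
    return 0;
  }

Consumers reconstruct absolute counts by summing deltas, so a long flat stretch costs only one zero-delta row instead of one row per tick.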

View file

@@ -4236,10 +4236,6 @@ void SamplerThread::Run() {
                 ActivePS::ControlledChunkManager(lock).TotalSize());
           }
 #endif
-          // In the future, we may support keyed counters - for example,
-          // counters with a key which is a thread ID. For "simple" counters
-          // we'll just use a key of 0.
-          buffer.AddEntry(ProfileBufferEntry::CounterKey(0));
           buffer.AddEntry(ProfileBufferEntry::Count(sample.count));
           if (sample.number) {
             buffer.AddEntry(ProfileBufferEntry::Number(sample.number));

View file

@@ -1393,12 +1393,7 @@ static void JSONRootCheck(const Json::Value& aRoot,
       EXPECT_HAS_JSON(counter["name"], String);
       EXPECT_HAS_JSON(counter["category"], String);
       EXPECT_HAS_JSON(counter["description"], String);
-      GET_JSON(sampleGroups, counter["sample_groups"], Array);
-      for (const Json::Value& sampleGroup : sampleGroups) {
-        ASSERT_TRUE(sampleGroup.isObject());
-        EXPECT_HAS_JSON(sampleGroup["id"], UInt);
-
-        GET_JSON(samples, sampleGroup["samples"], Object);
+      GET_JSON(samples, counter["samples"], Object);
       GET_JSON(samplesSchema, samples["schema"], Object);
       EXPECT_GE(samplesSchema.size(), 3u);
       GET_JSON_VALUE(samplesTime, samplesSchema["time"], UInt);
@@ -1420,7 +1415,6 @@ static void JSONRootCheck(const Json::Value& aRoot,
         }
       }
     }
-    }
   }
 
   GET_JSON(threads, aRoot["threads"], Array);
   const Json::ArrayIndex threadCount = threads.size();
@@ -3572,12 +3566,7 @@ TEST(GeckoProfiler, Counters)
       if (name == "TestCounter") {
         EXPECT_EQ_JSON(counter["category"], String, COUNTER_NAME);
         EXPECT_EQ_JSON(counter["description"], String, COUNTER_DESCRIPTION);
-        GET_JSON(sampleGroups, counter["sample_groups"], Array);
-        for (const Json::Value& sampleGroup : sampleGroups) {
-          ASSERT_TRUE(sampleGroup.isObject());
-          EXPECT_EQ_JSON(sampleGroup["id"], UInt, 0u);
-
-          GET_JSON(samples, sampleGroup["samples"], Object);
+        GET_JSON(samples, counter["samples"], Object);
         GET_JSON(samplesSchema, samples["schema"], Object);
         EXPECT_GE(samplesSchema.size(), 3u);
         GET_JSON_VALUE(samplesNumber, samplesSchema["number"], UInt);
@@ -3586,26 +3575,18 @@ TEST(GeckoProfiler, Counters)
         for (const Json::Value& sample : samplesData) {
           ASSERT_TRUE(sample.isArray());
           ASSERT_LT(nextExpectedTestCounter, expectedTestCountersCount);
-          EXPECT_EQ_JSON(
-              sample[samplesNumber], UInt64,
-              expectedTestCounters[nextExpectedTestCounter].mNumber);
-          EXPECT_EQ_JSON(
-              sample[samplesCount], Int64,
-              expectedTestCounters[nextExpectedTestCounter].mCount);
+          EXPECT_EQ_JSON(sample[samplesNumber], UInt64,
+                         expectedTestCounters[nextExpectedTestCounter].mNumber);
+          EXPECT_EQ_JSON(sample[samplesCount], Int64,
+                         expectedTestCounters[nextExpectedTestCounter].mCount);
           ++nextExpectedTestCounter;
         }
-        }
       } else if (name == "TestCounter2") {
         EXPECT_TRUE(expectCounter2);
         EXPECT_EQ_JSON(counter["category"], String, COUNTER_NAME2);
         EXPECT_EQ_JSON(counter["description"], String, COUNTER_DESCRIPTION2);
-        GET_JSON(sampleGroups, counter["sample_groups"], Array);
-        for (const Json::Value& sampleGroup : sampleGroups) {
-          ASSERT_TRUE(sampleGroup.isObject());
-          EXPECT_EQ_JSON(sampleGroup["id"], UInt, 0u);
-
-          GET_JSON(samples, sampleGroup["samples"], Object);
+        GET_JSON(samples, counter["samples"], Object);
         GET_JSON(samplesSchema, samples["schema"], Object);
         EXPECT_GE(samplesSchema.size(), 3u);
         GET_JSON_VALUE(samplesNumber, samplesSchema["number"], UInt);
@@ -3624,7 +3605,6 @@ TEST(GeckoProfiler, Counters)
           }
         }
       }
-      }
 
   EXPECT_EQ(nextExpectedTestCounter, expectedTestCountersCount);
   if (expectCounter2) {