0
0
mirror of https://github.com/mongodb/mongo.git synced 2024-11-21 12:39:08 +01:00

SERVER-94511 Test the ability of the product to handle degenerate queries (#28658)

GitOrigin-RevId: d2bc48a6ba65fc5c698d815c2284a7e945c8ad8e
This commit is contained in:
Philip Stoev 2024-11-07 07:38:20 +02:00 committed by MongoDB Bot
parent 3e8f271d98
commit 5af7bc78e1
15 changed files with 1973 additions and 0 deletions

View File

@ -0,0 +1,23 @@
# This set of tests exercises the limits of the product by running workloads that
# are stretching it in various dimensions, e.g. many collections, many $match predicates
# and so on.
test_kind: js_test

selector:
  roots:
    - jstests/product_limits/**/*.js

executor:
  archive:
    hooks:
      - ValidateCollections
  hooks:
    - class: ValidateCollections
      shell_options:
        global_vars:
          TestData:
            skipValidationOnNamespaceNotFound: false
  fixture:
    class: MongoDFixture
    mongod_options:
      set_parameters:
        # Required so the hooks/tests may use test-only commands.
        enableTestCommands: 1

View File

@ -1850,3 +1850,16 @@ tasks:
- func: "run tests"
vars:
resmoke_jobs_max: 1
- <<: *task_template
name: product_limits
tags:
[
"assigned_to_jira_team_server_query_execution",
"development_critical_single_variant",
]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_jobs_max: 1

View File

@ -183,6 +183,8 @@ variables:
- name: vector_search_ssl
- name: selinux_rhel8_enterprise
- name: generate_buildid_to_debug_symbols_mapping
- name: product_limits
cron: "0 4 * * 0" # From the ${project_weekly_cron} parameter
buildvariants:
- <<: *generic_linux_compile_params
@ -331,6 +333,8 @@ buildvariants:
# TODO(SERVER-90936): Remove this once streams_kafka* tests can work with the "default" tag.
- name: streams_kafka
- name: streams_kafka_gwproxy
- name: product_limits
cron: "0 4 * * 0" # From the ${project_weekly_cron} parameter
# This build variant is used to test suites that use sharded cluster fixture with embedded router mode.
# Also supports ProgramRunner/ShardingTest driven suites that look for an embedded router flag in TestData.

View File

@ -0,0 +1,252 @@
import {range} from "jstests/product_limits/libs/util.js";
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
export class WorkloadMatchArrayManyConditions extends PipelineWorkload {
    /** $match with many conditions over one array field */
    pipeline() {
        const conditions = range(this.scale()).map((i) => ({f0: {$eq: i}}));
        return [{$match: {$and: conditions}}, {$unset: "_id"}];
    }
}
export class WorkloadMatchArrayExact extends PipelineWorkload {
    /** $match comparing the array field against an exact array value */
    pipeline() {
        const expected = range(this.scale());
        return [{$match: {f0: expected}}, {$unset: "_id"}];
    }
}
export class WorkloadMatchArrayIndexPosition extends PipelineWorkload {
    /** $match with one equality per array position, via dotted paths */
    pipeline() {
        const match = {};
        range(this.scale()).forEach((i) => {
            match[`f0.${i}`] = i;
        });
        return [{$match: match}, {$unset: "_id"}];
    }
}
export class WorkloadAllElementsTrue extends PipelineWorkload {
    /** $allElementsTrue over a wide array field */
    pipeline() {
        const projection = {"allElementsTrue": {$allElementsTrue: "$f0"}};
        return [{$project: projection}, {$count: "cnt"}];
    }
    result() {
        // A single document flows through the pipeline.
        return [{"cnt": 1}];
    }
}
export class WorkloadAnyElementTrue extends PipelineWorkload {
    /** $anyElementTrue over a wide array field */
    pipeline() {
        const projection = {"anyElementTrue": {$anyElementTrue: "$f0"}};
        return [{$project: projection}, {$count: "cnt"}];
    }
    result() {
        // A single document flows through the pipeline.
        return [{"cnt": 1}];
    }
}
export class WorkloadArrayToObject extends PipelineWorkload {
    // NOTE(review): despite the class name, this pipeline uses $zip rather than
    // $arrayToObject — possibly copied from WorkloadZipArrayFields. Confirm intent.
    pipeline() {
        return [{$project: {"arrayToObject": {$zip: {inputs: ["$f0", "$f0"]}}}}, {$count: "cnt"}];
    }
    result() {
        // A single document flows through the pipeline.
        return [{"cnt": 1}];
    }
}
export class WorkloadConcatArrays extends PipelineWorkload {
    /** $concatArrays over many array operands */
    pipeline() {
        // FIXME(review): `$f$i` is a literal string — the template placeholder is not
        // interpolated (presumably `$f${i}` was intended), so every operand is the
        // identical single-element array referencing the nonexistent field "f$i".
        // Confirm against result(), which expects the concatenation to contain
        // scale() elements.
        let arrayList = range(this.scale()).map((i) => [`$f$i`]);
        return [{$project: {"size": {$size: [{$concatArrays: arrayList}]}}}, {$unset: "_id"}];
    }
    result() {
        return [{"size": this.scale()}];
    }
}
export class WorkloadFilter extends PipelineWorkload {
    /** $filter keeping every element of a wide array */
    pipeline() {
        const filter = {$filter: {input: "$f0", as: "f0", cond: {$gte: ["$$f0", 0]}}};
        return [{$project: {"f0": filter}}, {$count: "cnt"}];
    }
    result() {
        // A single document flows through the pipeline.
        return [{"cnt": 1}];
    }
}
export class WorkloadElemMatchGte extends PipelineWorkload {
    /** $elemMatch with a $gte predicate over a wide array field */
    pipeline() {
        return [{$match: {f0: {$elemMatch: {$gte: 0}}}}, {$count: "cnt"}];
    }
    result() {
        // The single wide-array document matches.
        return [{"cnt": 1}];
    }
}
export class WorkloadIndexOfArray extends PipelineWorkload {
    /** $indexOfArray locating the last element of a wide array */
    pipeline() {
        const target = this.scale() - 1;
        return [{$project: {"indexOfArray": {$indexOfArray: ["$f0", target]}}}, {$unset: "_id"}];
    }
    result() {
        // f0 holds 0..scale-1, so the value scale-1 sits at index scale-1.
        return [{indexOfArray: this.scale() - 1}];
    }
}
export class WorkloadReverseArray extends PipelineWorkload {
    /** $reverseArray over a wide array */
    pipeline() {
        const projection = {"reverseArray": {$reverseArray: "$f0"}};
        return [{$project: projection}, {$count: "cnt"}];
    }
    result() {
        // A single document flows through the pipeline.
        return [{cnt: 1}];
    }
}
export class WorkloadSetDifference extends PipelineWorkload {
    /** $setDifference of a wide array with itself */
    pipeline() {
        const diff = {$setDifference: ["$f0", "$f0"]};
        return [{$project: {"setDifference": diff}}, {$unset: "_id"}];
    }
    result() {
        // The difference of a set with itself is empty.
        return [{setDifference: []}];
    }
}
export class WorkloadSetIntersection extends PipelineWorkload {
    /** $size of the $setIntersection of a wide array with itself */
    pipeline() {
        const intersection = {$setIntersection: ["$f0", "$f0"]};
        return [{$project: {"setIntersection": {$size: intersection}}}, {$unset: "_id"}];
    }
    result() {
        // f0 holds scale() distinct values, so the intersection keeps all of them.
        return [{setIntersection: this.scale()}];
    }
}
export class WorkloadSetEquals extends PipelineWorkload {
    /** $setEquals of a wide array with itself */
    pipeline() {
        const equals = {$setEquals: ["$f0", "$f0"]};
        return [{$project: {"setEquals": equals}}, {$unset: "_id"}];
    }
    result() {
        return [{setEquals: true}];
    }
}
export class WorkloadZipArrayFields extends PipelineWorkload {
    /** $zip of a wide array with itself */
    pipeline() {
        const zipped = {"$zip": {"inputs": ["$f0", "$f0"]}};
        return [{"$project": {"zip": {"$size": zipped}}}, {$unset: "_id"}];
    }
    result() {
        // Zipping two scale()-element arrays yields scale() pairs.
        return [{"zip": this.scale()}];
    }
}
export class WorkloadMap extends PipelineWorkload {
    /** $map incrementing every element of a wide array */
    pipeline() {
        const mapped = {"$map": {input: "$f0", as: "f", in: {$add: ["$$f", 1]}}};
        return [{"$project": {"map": {"$size": mapped}}}, {$unset: "_id"}];
    }
    result() {
        // $map preserves the element count.
        return [{"map": this.scale()}];
    }
}
export class WorkloadReduce extends PipelineWorkload {
    /** $reduce computing the maximum element of a wide array */
    pipeline() {
        const reduction = {
            "$reduce": {input: "$f0", initialValue: 0, in: {$max: ["$$value", "$$this"]}}
        };
        return [{"$project": {"reduce": reduction}}, {$unset: "_id"}];
    }
    result() {
        // The array holds 0..scale-1, so the maximum is scale-1.
        return [{"reduce": this.scale() - 1}];
    }
}
export class WorkloadZipManyArrays extends PipelineWorkload {
    /** $zip over many single-element array operands */
    pipeline() {
        const inputs = range(this.scale()).map((i) => [`$f${i}`]);
        return [{$project: {"zip": {$zip: {inputs: inputs}}}}, {$unset: "_id"}];
    }
    result() {
        // Zipping scale() one-element arrays produces one scale()-element row.
        return [{"zip": [range(this.scale())]}];
    }
}
export class WorkloadAddToSet extends PipelineWorkload {
    /** $addToSet accumulating every f0 value, sorted afterwards for a stable comparison */
    pipeline() {
        const collect = {"$group": {"_id": null, "f": {$addToSet: "$f0"}}};
        const sortSet = {$project: {_id: 0, f: {$sortArray: {input: "$f", sortBy: 1}}}};
        return [collect, sortSet];
    }
    result() {
        return [{f: range(this.scale())}];
    }
}
export class WorkloadInOverArrayField extends PipelineWorkload {
    /** $in matched against an array-valued field */
    pipeline() {
        const values = range(this.scale());
        return [{$match: {f0: {$in: values}}}, {$count: 'cnt'}];
    }
    result() {
        // 1 row in this dataset
        return [{cnt: 1}];
    }
}
export class WorkloadAll extends PipelineWorkload {
    /** $all */
    pipeline() {
        const required = range(this.scale());
        return [{$match: {f0: {$all: required}}}, {$count: "cnt"}];
    }
    result() {
        // The single wide-array document contains every required value.
        return [{"cnt": 1}];
    }
}

View File

@ -0,0 +1,437 @@
import {ShardingTest} from "jstests/libs/shardingtest.js";
import * as ArrayWorkloads from "jstests/product_limits/libs/array.js";
import * as FindWorkloads from "jstests/product_limits/libs/find.js";
import * as GroupingWorkloads from "jstests/product_limits/libs/grouping.js";
import * as LongPipelineWorkloads from "jstests/product_limits/libs/long_pipelines.js";
import * as MatchWorkloads from "jstests/product_limits/libs/match.js";
import * as OperatorWorkloads from "jstests/product_limits/libs/operators.js";
import * as StageWorkloads from "jstests/product_limits/libs/stages.js";
import * as TextSearchWorkloads from "jstests/product_limits/libs/text_search.js";
import {DEFAULT_SCALE, range} from "jstests/product_limits/libs/util.js";
export class Dataset {
    /**
     * Base class for the datasets used by the product-limits tests. A dataset
     * knows how to populate its collection(s) and which workload classes to run
     * against them.
     */
    scale() {
        // The size the dataset/workloads are parameterized by (documents,
        // fields, collections, ... depending on the subclass).
        return DEFAULT_SCALE;
    }
    collection() {
        // Default collection name; multi-collection datasets ignore this.
        return "coll0";
    }
    db() {
        // Returns the shell's global `db`; DatasetSharded overrides this to
        // stand up a sharded cluster instead.
        return db;
    }
    runDataset() {
        // Populate once into a database named after the dataset class, then run
        // every workload against it using a single session.
        const db = this.db();
        const session = db.getMongo().startSession();
        const sessionDb = session.getDatabase(this.constructor.name);
        print(`Populating dataset ${this.constructor.name} ...`);
        this.populate(sessionDb);
        print("Population complete.");
        for (const workload of this.workloads()) {
            const wl = new workload;
            print(`Running workload ${this.constructor.name}.${wl.constructor.name}`);
            wl.runWorkload(this, session, sessionDb);
        }
        this.stop();
    }
    workloads() {
        // Subclasses must return the list of workload classes to run.
        assert(false, `No workloads() specified for dataset ${this.constructor.name}.`);
    }
    stop() {
        // Nothing to do for the default case
    }
    data() {
        // Subclasses return the documents the dataset contains (modulo _id).
        assert(false, `No data() specified for dataset ${this.constructor.name}.`);
    }
}
export class DatasetOneField extends Dataset {
    /** scale() documents, each with a single integer field: {f0: 0} .. {f0: scale-1}. */
    workloads() {
        return [
            ArrayWorkloads.WorkloadAddToSet,
            FindWorkloads.WorkloadFindOverSingleField,
            GroupingWorkloads.WorkloadBucketAutoManyBuckets,
            GroupingWorkloads.WorkloadBucketManyBoundaries,
            GroupingWorkloads.WorkloadManyAccumulatorsSameField,
            GroupingWorkloads.WorkloadSetWindowFieldsManyPartitions,
            GroupingWorkloads.WorkloadTopK,
            LongPipelineWorkloads.WorkloadAddFields,
            LongPipelineWorkloads.WorkloadFacetManyStages,
            MatchWorkloads.WorkloadAndOverSingleField,
            MatchWorkloads.WorkloadAndPlusOrOverSingleField,
            MatchWorkloads.WorkloadIn,
            MatchWorkloads.WorkloadManyIns,
            MatchWorkloads.WorkloadNin,
            MatchWorkloads.WorkloadOrOverSingleField,
            MatchWorkloads.WorkloadOrPlusAndOverSingleField,
            StageWorkloads.WorkloadLongFieldName,
            StageWorkloads.WorkloadManyDocuments,
            StageWorkloads.WorkloadReplaceRoot,
        ];
    }
    populate(db) {
        db.createCollection(this.collection());
        const coll = db.getCollection(this.collection());
        // One insert command per document.
        for (let i = 0; i < this.scale(); i++) {
            assert.commandWorked(coll.insert({f0: i}));
        }
    }
    data() {
        // Expected collection contents (modulo _id), mirroring populate().
        return range(this.scale()).map((i) => ({f0: i}));
    }
}
export class DatasetOneStringField extends Dataset {
    /** scale() documents, each with a single string field: {f0: "0"} .. {f0: "scale-1"}. */
    workloads() {
        return [
            MatchWorkloads.WorkloadRegex,
            MatchWorkloads.WorkloadRegexInIn,
        ];
    }
    populate(db) {
        const name = this.collection();
        db.createCollection(name);
        const coll = db.getCollection(name);
        // One insert command per document, in order.
        for (const doc of this.data()) {
            assert.commandWorked(coll.insert(doc));
        }
    }
    data() {
        return range(this.scale()).map((i) => ({f0: `${i}`}));
    }
}
export class DatasetOneDocumentOneField extends Dataset {
    /** A single document with a single integer field {f0: 0}. */
    workloads() {
        return [OperatorWorkloads.WorkloadRange, StageWorkloads.WorkloadNestedProject];
    }
    populate(db) {
        const name = this.collection();
        db.createCollection(name);
        assert.commandWorked(db.getCollection(name).insert({f0: 0}));
    }
}
export class DatasetOneFieldIndex extends DatasetOneField {
    /** DatasetOneField plus a single-field index on f0. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({f0: 1}));
    }
}
export class DatasetOneFieldPartialIndex extends DatasetOneField {
    /** DatasetOneField plus a partial index whose filter covers every value. */
    populate(db) {
        super.populate(db);
        const options = {partialFilterExpression: {f0: {$in: range(this.scale())}}};
        assert.commandWorked(db.getCollection(this.collection()).createIndex({f0: 1}, options));
    }
}
export class DatasetWideArray extends Dataset {
    /** A single document whose f0 is an array holding the integers 0..scale-1. */
    workloads() {
        return [
            ArrayWorkloads.WorkloadAll,
            ArrayWorkloads.WorkloadAllElementsTrue,
            ArrayWorkloads.WorkloadAnyElementTrue,
            ArrayWorkloads.WorkloadArrayToObject,
            ArrayWorkloads.WorkloadConcatArrays,
            ArrayWorkloads.WorkloadElemMatchGte,
            ArrayWorkloads.WorkloadFilter,
            ArrayWorkloads.WorkloadIndexOfArray,
            ArrayWorkloads.WorkloadInOverArrayField,
            ArrayWorkloads.WorkloadMap,
            ArrayWorkloads.WorkloadMatchArrayExact,
            ArrayWorkloads.WorkloadMatchArrayIndexPosition,
            ArrayWorkloads.WorkloadMatchArrayManyConditions,
            ArrayWorkloads.WorkloadReduce,
            ArrayWorkloads.WorkloadReverseArray,
            ArrayWorkloads.WorkloadSetDifference,
            ArrayWorkloads.WorkloadSetEquals,
            ArrayWorkloads.WorkloadSetIntersection,
            ArrayWorkloads.WorkloadZipArrayFields,
            StageWorkloads.WorkloadUnwind,
        ];
    }
    populate(db) {
        // No explicit createCollection: insertMany creates the collection implicitly.
        const collName = this.collection();
        const coll = db.getCollection(collName);
        assert.commandWorked(coll.insertMany(this.data()));
    }
    data() {
        return [{f0: range(this.scale())}];
    }
}
export class DatasetWideArrayIndex extends DatasetWideArray {
    /** DatasetWideArray plus a (multikey) index on the array field f0. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({f0: 1}));
    }
}
export class DatasetManyCollections extends Dataset {
    /** scale() collections (coll0..collN-1), one {f0: 1} document in each. */
    workloads() {
        return [
            LongPipelineWorkloads.WorkloadManyCollectionsInLookupBushy,
            LongPipelineWorkloads.WorkloadManyCollectionsInUnionWith,
        ];
    }
    populate(db) {
        for (const i of range(this.scale())) {
            const collName = `coll${i}`;
            print(`Creating collection ${collName}`);
            db.createCollection(collName);
            assert.commandWorked(db.getCollection(collName).insert({f0: 1}));
        }
    }
}
export class DatasetManyFields extends Dataset {
    /** A single document with scale() integer fields: f0 = 0 .. f{scale-1} = scale-1. */
    workloads() {
        return [
            ArrayWorkloads.WorkloadZipManyArrays,
            FindWorkloads.WorkloadFindOverManyFields,
            GroupingWorkloads.WorkloadBucketAutoManyOutputs,
            GroupingWorkloads.WorkloadBucketManyOutputs,
            GroupingWorkloads.WorkloadDensifyManyFields,
            GroupingWorkloads.WorkloadFillManyPartitionFields,
            GroupingWorkloads.WorkloadManyAccumulatorsManyFields,
            GroupingWorkloads.WorkloadManyGroupingFields,
            GroupingWorkloads.WorkloadSetWindowFieldsManyOutputs,
            GroupingWorkloads.WorkloadSetWindowFieldsManySortBy,
            LongPipelineWorkloads.WorkloadManyMatchStages,
            MatchWorkloads.WorkloadAndOverManyFields,
            MatchWorkloads.WorkloadAndPlusOrOverManyFields,
            MatchWorkloads.WorkloadExists,
            MatchWorkloads.WorkloadMatchOverManyFields,
            MatchWorkloads.WorkloadOrOverManyFields,
            MatchWorkloads.WorkloadOrPlusAndOverManyFields,
            OperatorWorkloads.WorkloadConcat,
            OperatorWorkloads.WorkloadCond,
            OperatorWorkloads.WorkloadSwitch,
            StageWorkloads.WorkloadFacetManyFields,
            StageWorkloads.WorkloadFillManyOutputs,
            StageWorkloads.WorkloadFillManySortFields,
            StageWorkloads.WorkloadGetField,
            StageWorkloads.WorkloadLetManyVars,
            StageWorkloads.WorkloadMergeManyLet,
            StageWorkloads.WorkloadProjectManyExpressions,
            StageWorkloads.WorkloadProjectManyFields,
            StageWorkloads.WorkloadSort,
            StageWorkloads.WorkloadSortByCount,
            StageWorkloads.WorkloadUnset,
        ];
    }
    populate(db) {
        const collName = this.collection();
        // Build the single wide document f0..f{scale-1} before creating the collection.
        let row = {};
        for (let i = 0; i < this.scale(); i++) {
            const fieldName = `f${i}`;
            row[fieldName] = i;
        }
        print(`Creating collection ${collName}`);
        db.createCollection(collName);
        const coll = db.getCollection(collName);
        assert.commandWorked(coll.insert(row));
    }
    data() {
        // Expected collection contents (modulo _id), mirroring populate().
        let result = {};
        for (let i = 0; i < this.scale(); i++) {
            result[`f${i}`] = i;
        }
        return [result];
    }
    field_list() {
        // Dollar-prefixed field paths, for use inside aggregation expressions.
        return range(this.scale()).map((i) => `$f${i}`);
    }
    value_list() {
        // The values stored in those fields, in the same order as field_list().
        return range(this.scale());
    }
}
export class DatasetManyFieldsMultiFieldIndex extends DatasetManyFields {
    /** DatasetManyFields plus one compound index over the first 32 fields. */
    populate(db) {
        super.populate(db);
        const keys = {};
        // 32 keys — presumably chosen to sit at the compound-index field limit.
        for (const i of range(32)) {
            keys[`f${i}`] = 1;
        }
        assert.commandWorked(db.getCollection(this.collection()).createIndex(keys));
    }
}
export class DatasetManyFieldsPartialIndex extends DatasetManyFields {
    /**
     * Compound index over the first 32 fields, with a partialFilterExpression
     * carrying one equality per field of the dataset.
     */
    populate(db) {
        super.populate(db);
        const keys = {};
        for (const i of range(32)) {
            keys[`f${i}`] = 1;
        }
        const filter = {};
        for (const i of range(this.scale())) {
            filter[`f${i}`] = i;
        }
        assert.commandWorked(db.getCollection(this.collection())
                                 .createIndex(keys, {partialFilterExpression: filter}));
    }
}
export class DatasetManyFieldsIndexes extends DatasetManyFields {
    /** DatasetManyFields plus 63 single-field indexes, one per field f0..f62. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        for (const i of range(63)) {
            assert.commandWorked(coll.createIndex({[`f${i}`]: 1}));
        }
    }
}
export class DatasetManyFieldsWildcardIndex extends DatasetManyFields {
    /** DatasetManyFields plus an all-paths $** wildcard index. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({'$**': 1}));
    }
}
export class DatasetNestedJSON extends Dataset {
scale() {
return 100;
}
workloads() {
return [MatchWorkloads.WorkloadMatchLongPath];
}
populate(db) {
const collName = this.collection();
let path = [];
for (let i = 0; i < this.scale(); i++) {
path.push(`f${i}`);
}
// $addFields will generate the entire hierarchy for us.
let pipeline = [
{$documents: [{}]},
{$addFields: {[path.join(".")]: "abc"}},
{
$out: {
db: this.constructor.name,
coll: collName,
}
}
];
db.aggregate(pipeline).toArray();
}
}
export class DatasetLongValue extends Dataset {
    /** A single document whose f0 is a very long (scale()-character) string. */
    scale() {
        return 10000000;
    }
    workloads() {
        return [MatchWorkloads.WorkloadLongValue];
    }
    populate(db) {
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.insert(this.data()));
    }
    data() {
        // We need one stand-alone 'x' for the fulltext search workloads below
        const longString = 'x'.repeat(this.scale()) + ' x';
        return [{f0: longString}];
    }
}
export class DatasetLongValueIndex extends DatasetLongValue {
    /** DatasetLongValue plus an ordinary index on the long string field. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({f0: 1}));
    }
}
export class DatasetLongValueHashed extends DatasetLongValue {
    /** DatasetLongValue plus a hashed index on the long string field. */
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({f0: "hashed"}));
    }
}
export class DatasetLongValueTextIndex extends DatasetLongValue {
    /** DatasetLongValue plus a text index, enabling the $text workloads. */
    workloads() {
        return [
            TextSearchWorkloads.WorkloadTextSearchLongString,
            TextSearchWorkloads.WorkloadTextSearchManyWords,
            TextSearchWorkloads.WorkloadTextSearchNegation
        ];
    }
    populate(db) {
        super.populate(db);
        const coll = db.getCollection(this.collection());
        assert.commandWorked(coll.createIndex({f0: "text"}));
    }
}
export class DatasetSharded extends DatasetManyFields {
    /**
     * DatasetManyFields on a 32-shard cluster, with the collection sharded on a
     * compound key over the first 32 fields.
     */
    db() {
        // NOTE: unlike the base class, this db() has side effects — it starts the
        // cluster, enables sharding and shards the collection before returning
        // the mongos database handle used by runDataset().
        this.shardedTest = new ShardingTest({shards: 32, other: {chunkSize: 1}});
        const primaryShard = this.shardedTest.shard0;
        const dbName = this.constructor.name;
        const db = this.shardedTest.s.getDB(dbName);
        assert.commandWorked(this.shardedTest.s.adminCommand(
            {enableSharding: dbName, primaryShard: primaryShard.shardName}));
        let collName = this.collection();
        let shardKey = {};
        for (let i = 0; i < 32; i++) {
            shardKey[`f${i}`] = 1;
        }
        db.createCollection(collName);
        assert.commandWorked(this.shardedTest.s.adminCommand(
            {shardCollection: `${dbName}.${collName}`, key: shardKey}));
        return db;
    }
    stop() {
        // Tear down the cluster started in db().
        this.shardedTest.stop();
    }
}
// Every dataset the product-limits suite runs; each entry is instantiated and
// driven via runDataset() by the test entry points.
export const DATASETS = [
    DatasetOneField,
    DatasetOneFieldIndex,
    DatasetOneFieldPartialIndex,
    DatasetOneDocumentOneField,
    DatasetOneStringField,
    DatasetWideArray,
    DatasetWideArrayIndex,
    DatasetManyCollections,
    DatasetManyFields,
    DatasetManyFieldsMultiFieldIndex,
    DatasetManyFieldsIndexes,
    DatasetManyFieldsPartialIndex,
    DatasetManyFieldsWildcardIndex,
    DatasetLongValue,
    DatasetLongValueIndex,
    DatasetLongValueHashed,
    DatasetLongValueTextIndex,
    DatasetSharded,
    DatasetNestedJSON
];

View File

@ -0,0 +1,48 @@
import {range} from "jstests/product_limits/libs/util.js";
import {Workload} from "jstests/product_limits/libs/workload.js";
export class FindWorkload extends Workload {
    /**
     * Base class for workloads that issue a find() command rather than an
     * aggregation pipeline. Subclasses provide find(dataset) returning the
     * query predicate, and optionally result() for the expected rows.
     */
    runWorkload(dataset, _, db) {
        const coll = db.getCollection(this.collection());
        const find = this.find(dataset);
        printjsononeline(find);
        // Bug fix: explain the same predicate the workload actually runs; the
        // original passed the `dataset` object itself as the query filter.
        // NOTE(review): the explain query is constructed but never iterated —
        // confirm whether a .finish() call was intended to actually run it.
        coll.explain("allPlansExecution").find(find, {"_id": 0});
        const startTime = Date.now();
        const actualResult = coll.find(find, {"_id": 0}).toArray();
        const duration = Date.now() - startTime;
        print(`${dataset.constructor.name}.${this.constructor.name} took ${duration} ms.`);
        this.check(dataset, actualResult);
        print("Find execution complete.");
    }
}
export class WorkloadFindOverSingleField extends FindWorkload {
    /** A find() whose predicate is an $and of many range conditions on one field. */
    scale() {
        // SERVER-96119 SBE: Stack overflow with many conditions to a $match, index
        return Math.min(1000, super.scale());
    }
    find() {
        const conditions = range(this.scale()).map((i) => ({'f0': {$lt: this.scale() + i}}));
        return {$and: conditions};
    }
    result() {
        // Every condition admits all rows 0..scale-1, so all rows survive.
        return range(this.scale()).map((i) => ({f0: i}));
    }
}
export class WorkloadFindOverManyFields extends FindWorkload {
    /** A find() with one equality predicate per field. */
    find() {
        const find = {};
        for (const i of range(this.scale())) {
            find[`f${i}`] = i;
        }
        return find;
    }
}

View File

@ -0,0 +1,303 @@
import {range} from "./jstests/product_limits/libs/util.js";
import {PipelineWorkload} from "./jstests/product_limits/libs/workload.js";
export class WorkloadManyAccumulatorsSameField extends PipelineWorkload {
    /** Many accumulators in a single $group stage */
    pipeline() {
        let accumulators = {};
        for (const i of range(this.scale())) {
            accumulators[`f${i}`] = {$max: {$add: ["$f0", i]}};
        }
        accumulators['_id'] = null;
        return [{$group: accumulators}];
    }
    result() {
        // max(f0) is scale-1, so accumulator i yields scale-1+i.
        let row = {"_id": null};
        for (const i of range(this.scale())) {
            row[`f${i}`] = this.scale() + i - 1;
        }
        return [row];
    }
}
export class WorkloadManyGroupingFields extends PipelineWorkload {
    /** Many fields in the _id argument of $group */
    pipeline(dataset) {
        return [{$group: {_id: dataset.field_list(), f0: {$max: 0}}}];
    }
    result(dataset) {
        return [{"_id": dataset.value_list(), f0: 0}];
    }
}
export class WorkloadManyAccumulatorsManyFields extends PipelineWorkload {
    /** Many accumulators over distinct fields in a single $group stage */
    pipeline() {
        let accumulators = {};
        for (const i of range(this.scale())) {
            accumulators[`f${i}`] = {$max: `$f${i}`};
        }
        accumulators['_id'] = null;
        return [{$group: accumulators}];
    }
    result() {
        // The dataset holds a single document with f{i} == i.
        let row = {"_id": null};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadBucketManyBoundaries extends PipelineWorkload {
/** Many boundaries in a single $bucket stage */
scale() {
// SERVER-95977 Stack overflow with many boundaries in $bucket
return Math.min(1000, super.scale());
}
pipeline() {
let boundaries = range(this.scale() + 1);
return [{
$bucket: {
groupBy: "$f0",
boundaries: boundaries,
default: "default",
output: {"count": {$sum: 1}}
}
}];
}
result(dataset) {
let result = [];
for (let i = 0; i < this.scale(); i++) {
result.push({_id: i, count: 1});
}
if (this.scale() < dataset.scale()) {
// The default bucket will collect all values above the largest boundary
result.push({_id: "default", count: dataset.scale() - this.scale()});
}
return result;
}
}
export class WorkloadBucketManyOutputs extends PipelineWorkload {
    /** Many outputs in a single $bucket stage */
    pipeline() {
        const output = {};
        for (const i of range(this.scale())) {
            output[`f${i}`] = {$min: `$f${i}`};
        }
        return [
            {$bucket: {groupBy: "$f0", boundaries: [0, 1], default: "default", output: output}}
        ];
    }
    result() {
        // A single bucket with _id 0 carries the per-field minimums.
        const row = {"_id": 0};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadBucketAutoManyBuckets extends PipelineWorkload {
    /** Many buckets in a single $bucketAuto stage */
    pipeline() {
        return [
            {$bucketAuto: {groupBy: "$f0", buckets: this.scale(), output: {"count": {$sum: 1}}}}
        ];
    }
    result() {
        // One single-value bucket per document; the final bucket's max equals its min.
        const last = this.scale() - 1;
        return range(this.scale())
            .map((i) => ({_id: {min: i, max: i === last ? i : i + 1}, count: 1}));
    }
}
export class WorkloadBucketAutoManyOutputs extends PipelineWorkload {
    /** Many outputs in a single $bucketAuto stage */
    pipeline() {
        const output = {};
        for (const i of range(this.scale())) {
            output[`f${i}`] = {$min: `$f${i}`};
        }
        return [{$bucketAuto: {groupBy: "$f0", buckets: 1, output: output}}];
    }
    result() {
        const row = {"_id": {min: 0, max: 0}};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadSetWindowFieldsManyPartitions extends PipelineWorkload {
    /** $setWindowFields partitioned by a $concat of many stringified fields */
    pipeline() {
        const parts = range(this.scale()).map((i) => ({$toString: `$f${i}`}));
        return [
            {$setWindowFields: {partitionBy: {$concat: parts}, output: {"f0": {$max: "$f0"}}}},
            {$unset: "_id"}
        ];
    }
    result() {
        // Every output document carries the maximum f0 value.
        const row = {"f0": this.scale() - 1};
        return range(this.scale()).map(() => row);
    }
}
export class WorkloadSetWindowFieldsManySortBy extends PipelineWorkload {
    /** Many fields in the sortBy argument of $setWindowFields */
    scale() {
        // "too many compound keys" — larger sortBy specs are rejected by the server.
        return 32;
    }
    pipeline() {
        let sortByFields = {};
        for (let i = 0; i < this.scale(); i++) {
            sortByFields[`f${i}`] = 1;
        }
        return [
            {$setWindowFields: {sortBy: sortByFields, output: {"f0": {$max: "$f0"}}}},
            {$unset: "_id"}
        ];
    }
    result(dataset) {
        // Note: the expected row spans dataset.scale() fields, not this.scale(),
        // since the single input document keeps all of its fields.
        let row = {};
        for (let i = 0; i < dataset.scale(); i++) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadSetWindowFieldsManyOutputs extends PipelineWorkload {
    /** Many output fields in a single $setWindowFields stage */
    pipeline() {
        const output = {};
        for (const i of range(this.scale())) {
            output[`f${i}`] = {$max: `$f${i}`};
        }
        return [{$setWindowFields: {output: output}}, {$unset: "_id"}];
    }
    result() {
        const row = {};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadDensifyManyFields extends PipelineWorkload {
    /** $densify with many $partitionByFields */
    pipeline() {
        const partitionByFields = range(this.scale()).map((i) => `f${i}`);
        return [
            {
                $densify: {
                    field: "densify",
                    partitionByFields: partitionByFields,
                    range: {bounds: "full", step: 1}
                }
            },
            {$project: {_id: 0}}
        ];
    }
    result() {
        const row = {};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadFillManyPartitionFields extends PipelineWorkload {
    /** $fill with many partitionByFields fields */
    pipeline() {
        const partitionByFields = range(this.scale()).map((i) => `f${i}`);
        return [
            {$fill: {partitionByFields: partitionByFields, output: {f0: {"value": 1}}}},
            {$unset: "_id"}
        ];
    }
    result() {
        const row = {};
        for (const i of range(this.scale())) {
            row[`f${i}`] = i;
        }
        return [row];
    }
}
export class WorkloadTopK extends PipelineWorkload {
    /** All the N-accumulators ($minN/$maxN/$firstN/$lastN/$topN/$bottomN) in one $group */
    pipeline() {
        const args = {input: "$f0", n: this.scale()};
        const sorted = {n: this.scale(), output: "$f0", sortBy: {"f0": 1}};
        return [
            {
                "$group": {
                    "_id": null,
                    "minN": {$minN: args},
                    "maxN": {$maxN: args},
                    "firstN": {$firstN: args},
                    "lastN": {$lastN: args},
                    "topN": {$topN: sorted},
                    "bottomN": {$bottomN: sorted},
                }
            },
            {$unset: "_id"}
        ];
    }
    result() {
        const vals = range(this.scale());
        // $maxN reports values in descending order.
        const reversed = [...vals].reverse();
        return [{minN: vals, maxN: reversed, firstN: vals, lastN: vals, topN: vals, bottomN: vals}];
    }
}

View File

@ -0,0 +1,130 @@
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
export class LongPipelineWorkload extends PipelineWorkload {
    /**
     * A pipeline can not have more than 1000 stages,
     * and we usually tack $unset at the end
     */
    scale() {
        return Math.min(990, super.scale());
    }
}
export class WorkloadManyCollectionsInUnionWith extends LongPipelineWorkload {
    /** $unionWith of many collections. */
    // A pipeline can not have more than 1000 stages, and we tack $unset at the end
    scale() {
        return Math.min(999, super.scale());
    }
    pipeline() {
        const stages =
            Array.from({length: this.scale()}, (_, i) => ({$unionWith: `coll${i}`}));
        return [...stages, {$unset: "_id"}];
    }
    result() {
        // One row from the base collection plus one per $unionWith stage.
        return Array.from({length: this.scale() + 1}, () => ({f0: 1}));
    }
}
export class WorkloadManyCollectionsInLookupBushy extends LongPipelineWorkload {
    /** Many $lookup-s where each new collection is joined to the same column. */
    scale() {
        // Too many $lookups result in "errmsg" : "BSONObj size: 53097740 (0x32A350C) is invalid.
        // Size must be between 0 and 16793600(16MB) First element: slots: \"$$RESULT=s7202 env: {
        // }\"",
        return Math.min(500, super.scale());
    }
    pipeline() {
        let pipeline = [];
        let unsetList = ["_id"];
        // Starts at i = 1 so coll0 (the collection being aggregated) is not joined to itself.
        for (let i = 1; i < this.scale(); i++) {
            pipeline.push({
                $lookup:
                    {from: `coll${i}`, localField: "f0", foreignField: "f0", as: `asField_${i}`}
            });
            // Remove all _id fields
            unsetList.push(`asField_${i}._id`);
        }
        pipeline.push({$unset: unsetList});
        return pipeline;
    }
    result() {
        // Each collection holds a single {f0: 1} document, so every join matches it.
        let result = {f0: 1};
        for (let i = 1; i < this.scale(); i++) {
            result[`asField_${i}`] = [{f0: 1}];
        }
        return [result];
    }
}
export class WorkloadManyMatchStages extends LongPipelineWorkload {
    /** Many $match stages. */
    pipeline() {
        const stages =
            Array.from({length: this.scale()}, (_, i) => ({$match: {[`f${i}`]: i}}));
        return [...stages, {$unset: "_id"}];
    }
}
export class WorkloadFacetManyStages extends LongPipelineWorkload {
    /** $facet with many pipeline stages */
    pipeline() {
        const inner = [
            {$limit: 1},
            ...Array.from({length: this.scale()}, (_, i) => ({$addFields: {[`f${i}`]: i}})),
            {$project: {_id: 0}},
        ];
        return [{$facet: {f0: inner}}];
    }
    result() {
        // The facet emits one document carrying every added field.
        const row = {};
        for (let i = 0; i < this.scale(); i++) {
            row[`f${i}`] = i;
        }
        return [{"f0": [row]}];
    }
}
export class WorkloadAddFields extends LongPipelineWorkload {
    /** Many individual $addFields stages */
    pipeline() {
        const stages =
            Array.from({length: this.scale()}, (_, i) => ({$addFields: {[`f${i}`]: i}}));
        stages.push({$limit: this.scale()}, {$unset: "_id"});
        return stages;
    }
    result() {
        // Every surviving document carries the full set of added fields.
        const row = {};
        for (let i = 0; i < this.scale(); i++) {
            row[`f${i}`] = i;
        }
        return Array.from({length: this.scale()}, () => row);
    }
}

View File

@ -0,0 +1,246 @@
import {range} from "jstests/product_limits/libs/util.js";
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
export class WorkloadAndOverSingleField extends PipelineWorkload {
    /** $and of many range predicates over a single field */
    scale() {
        // SERVER-96119 SBE: Stack overflow with many conditions to a $match, index
        return Math.min(1000, super.scale());
    }
    pipeline() {
        const conditions = range(this.scale()).map((i) => ({'f0': {$lt: this.scale() + i}}));
        return [{$match: {$and: conditions}}, {$unset: "_id"}];
    }
    result() {
        // Every predicate admits all rows 0..scale-1, so all rows survive.
        return range(this.scale()).map((i) => ({f0: i}));
    }
}
export class WorkloadOrOverSingleField extends PipelineWorkload {
    /** $or of many predicates over a single field */
    scale() {
        // SERVER-96119 SBE: Stack overflow with many conditions to a $match, index
        return Math.min(1000, super.scale());
    }
    pipeline(dataset) {
        // Those conditions all evaluate to False
        const conditions = range(this.scale()).map((i) => ({'f0': {$gt: this.scale() + i}}));
        // This condition evaluates to True
        conditions.push({'f0': {$lt: dataset.scale()}});
        return [{$match: {$or: conditions}}, {$unset: "_id"}];
    }
}
export class WorkloadAndPlusOrOverSingleField extends PipelineWorkload {
    /** $and of many $or clauses over a single field */
    pipeline() {
        const clauses = range(this.scale()).map(
            (i) => ({$or: [{f0: {$lt: this.scale() - i}}, {f0: {$gte: 0}}]}));
        return [{$match: {$and: clauses}}, {$unset: "_id"}];
    }
}
export class WorkloadOrPlusAndOverSingleField extends PipelineWorkload {
    /** $or of many $and clauses over a single field */
    pipeline() {
        // These conditions all evaluate to False
        const clauses = range(this.scale()).map(
            () => ({$and: [{f0: {$gt: this.scale()}}, {f0: {$gte: 0}}]}));
        // This condition evaluates to True
        clauses.push({$and: [{f0: {$lt: this.scale()}}, {f0: {$gte: 0}}]});
        return [{$match: {$or: clauses}}, {$unset: "_id"}];
    }
}
export class WorkloadAndOverManyFields extends PipelineWorkload {
    /** $and with one $lt predicate per field */
    pipeline() {
        const conditions = range(this.scale()).map((i) => ({[`f${i}`]: {$lt: 65535}}));
        return [{$match: {$and: conditions}}, {$unset: "_id"}];
    }
}
export class WorkloadOrOverManyFields extends PipelineWorkload {
    /** $or with one predicate per field */
    pipeline() {
        // All those conditions evaluate to False
        const conditions = range(this.scale()).map((i) => ({[`f${i}`]: {$gt: 65535}}));
        // This condition evaluates to True
        conditions.push({[`f${this.scale() - 1}`]: {$gt: 0}});
        return [{$match: {$or: conditions}}, {$unset: "_id"}];
    }
}
export class WorkloadAndPlusOrOverManyFields extends PipelineWorkload {
    /** $and of many $or clauses, each over a distinct field */
    pipeline() {
        const clauses = range(this.scale()).map(
            (i) => ({$or: [{[`f${i}`]: {$lt: this.scale()}}, {[`f${i}`]: {$gte: 0}}]}));
        return [{$match: {$and: clauses}}, {$unset: "_id"}];
    }
}
export class WorkloadOrPlusAndOverManyFields extends PipelineWorkload {
    /** $or of many $and clauses, each over a distinct field */
    pipeline() {
        // All those conditions evaluate to False
        const clauses = range(this.scale()).map(
            (i) => ({$and: [{[`f${i}`]: {$gt: this.scale()}}, {[`f${i}`]: {$gte: 0}}]}));
        // This condition evaluates to True
        const last = `f${this.scale() - 1}`;
        clauses.push({$and: [{[last]: {$lt: this.scale()}}, {[last]: {$gte: 0}}]});
        return [{$match: {$or: clauses}}, {$unset: "_id"}];
    }
}
export class WorkloadMatchOverManyFields extends PipelineWorkload {
    /** $match with individual equality conditions over many fields. */
    pipeline() {
        const match = {};
        for (const i of range(this.scale())) {
            match[`f${i}`] = i;
        }
        return [{$match: match}, {$unset: "_id"}];
    }
}
export class WorkloadMatchLongPath extends PipelineWorkload {
    /** $match against a deeply nested dotted path */
    scale() {
        return 100;
    }
    path() {
        // Builds "f0.f1.....f{scale-1}".
        return range(this.scale()).map((i) => `f${i}`).join('.');
    }
    pipeline() {
        return [{$match: {[this.path()]: "abc"}}, {$unset: "_id"}, {$count: "cnt"}];
    }
    result() {
        return [{"cnt": 1}];
    }
}
export class WorkloadIn extends PipelineWorkload {
    /** A single $in carrying scale()-many values. */
    pipeline() {
        const inValues = range(this.scale());
        return [{$match: {f0: {$in: inValues}}}, {$unset: "_id"}];
    }
}
export class WorkloadNin extends PipelineWorkload {
    /** A single $nin carrying scale()-many values, none of which match f0. */
    pipeline() {
        const absentValues = range(this.scale()).map((i) => this.scale() + i);
        return [{$match: {f0: {$nin: absentValues}}}, {$unset: "_id"}];
    }
}
export class WorkloadManyIns extends PipelineWorkload {
    /** $or of many single-element $in conditions on f0. */
    pipeline() {
        const inConditions = range(this.scale()).map((i) => ({f0: {$in: [i]}}));
        return [{$match: {$or: inConditions}}, {$unset: "_id"}];
    }
}
export class WorkloadRegexInIn extends PipelineWorkload {
    /** A single $in whose elements are regular expressions. */
    pipeline() {
        const regexes = range(this.scale()).map((i) => new RegExp(String.raw`${i}|.*`));
        return [{$match: {f0: {$in: regexes}}}, {$unset: "_id"}];
    }
}
export class WorkloadRegex extends PipelineWorkload {
    /** A single $regex containing many alternations. */
    scale() {
        // Regular expression is invalid: pattern string is longer than the limit set by the
        // application
        return 1000;
    }
    pipeline() {
        const alternation = new RegExp(range(this.scale()).join("|"));
        return [{$match: {f0: {$regex: alternation}}}, {$unset: "_id"}];
    }
}
export class WorkloadExists extends PipelineWorkload {
    /** $match with one $exists predicate per field. */
    pipeline() {
        const existsPredicates =
            Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, {$exists: true}]));
        return [{$match: existsPredicates}, {$count: "cnt"}];
    }
    result() {
        return [{"cnt": 1}];
    }
}
export class WorkloadLongValue extends PipelineWorkload {
    /** Equality $match against a very long string value. */
    pipeline(dataset) {
        const longValue = 'x'.repeat(dataset.scale()) + ' x';
        return [{$match: {f0: longValue}}, {$unset: "_id"}];
    }
}

View File

@ -0,0 +1,86 @@
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
export class WorkloadConcat extends PipelineWorkload {
    /** $concat over the stringified value of every field. */
    pipeline() {
        const parts = Array.from({length: this.scale()}, (_, i) => ({$toString: `$f${i}`}));
        return [{$project: {"concat": {$concat: parts}, _id: 0}}];
    }
    result() {
        // The dataset holds f_i == i, so the concatenation is "012...".
        const expected = Array.from({length: this.scale()}, (_, i) => i).join("");
        return [{concat: expected}];
    }
}
export class WorkloadSwitch extends PipelineWorkload {
    /**
     * $switch with many branches. We explicitly generate conditions that
     * are false in order to cause all branches to be attempted.
     */
    scale() {
        // SERVER-96119 SBE: Stack overflow with many conditions to a $match, index
        return Math.min(1000, super.scale());
    }
    pipeline() {
        const branches =
            Array.from({length: this.scale()}, (_, i) => ({case: {$ne: [`$f${i}`, i]}, then: i}));
        return [
            {$project: {"result": {$switch: {branches: branches, default: "no match"}}}},
            {$unset: "_id"}
        ];
    }
    result() {
        return [{"result": "no match"}];
    }
}
export class WorkloadCond extends PipelineWorkload {
    /**
     * $cond with many levels of nesting
     */
    scale() {
        return Math.min(70, super.scale());  // Exceeded depth limit of 150 when converting js
                                             // object to BSON. Do you have a cycle?
    }
    pipeline() {
        // Wrap the innermost value "match" in scale()-many $cond levels; the
        // outermost $cond tests the highest field index.
        const indices = Array.from({length: this.scale()}, (_, i) => i);
        const nested = indices.reduce(
            (inner, i) => ({$cond: {if: {$eq: [`$f${i}`, i]}, then: inner, else: "no match"}}),
            "match");
        return [{$project: {"result": nested}}, {$unset: "_id"}];
    }
    result() {
        return [{"result": "match"}];
    }
}
export class WorkloadRange extends PipelineWorkload {
    /** $range producing a very large in-memory array, reduced via $size. */
    scale() {
        return 5000000;
    }
    pipeline() {
        const rangeLength = {$size: {$range: [0, this.scale()]}};
        return [{$project: {_id: 0, range: rangeLength}}];
    }
    result() {
        return [{"range": this.scale()}];
    }
}

View File

@ -0,0 +1,324 @@
import {range} from "jstests/product_limits/libs/util.js";
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
export class WorkloadFillManySortFields extends PipelineWorkload {
    /** $fill with many sortBy fields */
    pipeline() {
        const sortBy = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, 1]));
        return [{$fill: {sortBy: sortBy, output: {f0: {"value": 1}}}}, {$unset: "_id"}];
    }
    result() {
        return [Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, i]))];
    }
}
export class WorkloadFillManyOutputs extends PipelineWorkload {
    /** $fill with many outputs */
    pipeline() {
        const outputs =
            Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, {method: "linear"}]));
        return [{$fill: {sortBy: {"f0": 1}, output: outputs}}, {$unset: "_id"}];
    }
    result() {
        return [Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, i]))];
    }
}
export class WorkloadMergeManyLet extends PipelineWorkload {
    /** $merge with many let */
    pipeline() {
        const letVars = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, i]));
        return [{
            $merge: {
                into: this.constructor.name,
                whenMatched: [{$addFields: {"foo": "bar"}}],
                let: letVars
            }
        }];
    }
    result() {
        // $merge writes to a collection and produces no output documents.
        return [];
    }
}
export class WorkloadLetManyVars extends PipelineWorkload {
    /* Use many $lets with many variables, each with a complex expression.
     */
    scale() {
        // Object size exceeds limit of 16793600 bytes.
        return 50;
    }
    pipeline() {
        // Every variable v_i holds the same $and over all field equalities.
        const allFieldsMatch = range(this.scale()).map((i) => ({$eq: [`$f${i}`, i]}));
        const vars = {};
        const maxArgs = [];
        for (const i of range(this.scale())) {
            vars[`v${i}`] = {"$and": allFieldsMatch};
            maxArgs.push(`$$v${i}`);
        }
        const projection = {};
        for (const i of range(this.scale())) {
            projection[`max${i}`] = {$let: {vars: vars, in: {$max: maxArgs}}};
        }
        return [{$project: projection}, {$unset: "_id"}];
    }
    result() {
        return [Object.fromEntries(range(this.scale()).map((i) => [`max${i}`, true]))];
    }
}
export class WorkloadProjectManyExpressions extends PipelineWorkload {
    /** One large $project stage */
    pipeline() {
        const projection = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, 'a']));
        projection['_id'] = 0;
        return [{$project: projection}];
    }
    result() {
        // Every field is replaced by the literal 'a'.
        return [Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, 'a']))];
    }
}
export class WorkloadProjectManyFields extends PipelineWorkload {
    /** One large exclusion $project stage with many fields */
    pipeline() {
        const exclusions = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, 0]));
        return [{$project: exclusions}, {$unset: "_id"}];
    }
    result() {
        // We projected away everything
        return [{}];
    }
}
export class WorkloadNestedProject extends PipelineWorkload {
    /** $project then $unset of one deeply-nested dotted path. */
    // BSONDepth::kDefaultMaxAllowableDepth = 200
    scale() {
        return 190;
    }
    pipeline() {
        const nestedPath = range(this.scale()).join(".");
        return [
            {$project: {[nestedPath]: "abc"}},
            {$unset: "_id"},
            {$unset: nestedPath},
            {$count: "cnt"}
        ];
    }
    result() {
        return [{"cnt": 1}];
    }
}
//
// $replaceRoot
//
export class WorkloadReplaceRoot extends PipelineWorkload {
    /** One large $replaceRoot stage */
    pipeline() {
        const newRoot = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, i]));
        return [{$replaceRoot: {newRoot: newRoot}}, {$count: "cnt"}];
    }
    result() {
        return [{"cnt": this.scale()}];
    }
}
export class WorkloadSort extends PipelineWorkload {
    /** $sort over the maximum allowed number of sort keys. */
    scale() {
        // "too many compound keys"
        return 32;
    }
    pipeline() {
        const sortSpec = Object.fromEntries(range(this.scale()).map((i) => [`f${i}`, 1]));
        return [{$sort: sortSpec}, {$unset: "_id"}];
    }
    result(dataset) {
        return [Object.fromEntries(range(dataset.scale()).map((i) => [`f${i}`, i]))];
    }
}
export class WorkloadSortByCount extends PipelineWorkload {
    /** $sortByCount keyed on a $concat of many string literals. */
    sortKey() {
        // Note: these are literal strings "f0", "f1", ..., not field paths,
        // so the $concat below evaluates to one constant string.
        return range(this.scale()).map((i) => `f${i}`);
    }
    pipeline() {
        return [{$sortByCount: {$concat: this.sortKey()}}];
    }
    result() {
        const concatKey = this.sortKey().join("");
        return [{_id: concatKey, count: 1}];
    }
}
export class WorkloadUnset extends PipelineWorkload {
    /** $unset of every field, including _id. */
    pipeline() {
        const fieldsToUnset = range(this.scale()).map((i) => `f${i}`);
        fieldsToUnset.push("_id");
        return [{$unset: fieldsToUnset}];
    }
    result() {
        // We projected away everything
        return [{}];
    }
}
export class WorkloadUnwind extends PipelineWorkload {
    /** $unwind of the array field f0, which holds scale()-many elements. */
    pipeline() {
        // The loop that built an `unsetList` here was dead code (the list was
        // never referenced, apparently copied from WorkloadUnset) and has been
        // removed; the pipeline only unwinds f0 and drops _id.
        return [{$unwind: "$f0"}, {$unset: "_id"}];
    }
    result() {
        // One output document per array element.
        let result = [];
        for (let i = 0; i < this.scale(); i++) {
            result.push({"f0": i});
        }
        return result;
    }
}
export class WorkloadManyDocuments extends PipelineWorkload {
    /** $documents with scale()-many inline documents. */
    pipeline() {
        return [{$documents: range(this.scale()).map((i) => ({[`f${i}`]: i}))}];
    }
    result() {
        return range(this.scale()).map((i) => ({[`f${i}`]: i}));
    }
}
export class WorkloadFacetManyFields extends PipelineWorkload {
    /** $facet with many fields */
    pipeline() {
        const facets = Object.fromEntries(
            range(this.scale()).map((i) => [`f${i}`, [{$project: {[`f${i}`]: 1, _id: 0}}]]));
        return [{$facet: facets}];
    }
    result() {
        return [Object.fromEntries(
            range(this.scale()).map((i) => [`f${i}`, [{[`f${i}`]: i}]]))];
    }
}
export class WorkloadGetField extends PipelineWorkload {
    /** $getField extracting the highest-numbered field of the current document. */
    pipeline() {
        const lastField = `f${this.scale() - 1}`;
        return [
            {$project: {"getField": {$getField: {field: lastField, input: "$$CURRENT"}}}},
            {$unset: "_id"}
        ];
    }
    result() {
        return [{getField: this.scale() - 1}];
    }
}
export class WorkloadLongFieldName extends PipelineWorkload {
    /** $count into a field whose name is scale() characters long. */
    longString() {
        return 'x'.repeat(this.scale());
    }
    pipeline() {
        return [{$count: this.longString()}];
    }
    result() {
        const fieldName = this.longString();
        return [{[fieldName]: this.scale()}];
    }
}

View File

@ -0,0 +1,34 @@
import {range} from "jstests/product_limits/libs/util.js";
import {PipelineWorkload} from "jstests/product_limits/libs/workload.js";
class WorkloadTextSearch extends PipelineWorkload {
    /** Base class: $text search using a subclass-provided search string. */
    pipeline(dataset) {
        return [{$match: {$text: {$search: this.searchString(dataset)}}}, {$unset: "_id"}];
    }
    /** Stringified integers 0..scale()-1; those integers are not present in the dataset. */
    numbersAsStrings() {
        return range(this.scale()).map((i) => `${i}`);
    }
}
export class WorkloadTextSearchLongString extends WorkloadTextSearch {
    /** A single search term that is dataset.scale() characters long. */
    searchString(dataset) {
        return 'x'.repeat(dataset.scale());
    }
}
export class WorkloadTextSearchManyWords extends WorkloadTextSearch {
    /** Many non-matching terms plus the matching term 'x'. */
    searchString() {
        return [...this.numbersAsStrings(), 'x'].join(' ');
    }
}
export class WorkloadTextSearchNegation extends WorkloadTextSearch {
    /** Negates every number term and includes the matching term 'x'. */
    searchString() {
        const negatedTerms = this.numbersAsStrings().map((s) => `-${s}`);
        return [...negatedTerms, 'x'].join(' ');
    }
}

View File

@ -0,0 +1,5 @@
// Default size knob shared by all product-limit workloads.
export const DEFAULT_SCALE = 10000;

/** Returns the integers [0, high), e.g. range(3) === [0, 1, 2]. */
export function range(high) {
    return Array.from({length: high}, (_, i) => i);
}

View File

@ -0,0 +1,52 @@
import {DEFAULT_SCALE} from "jstests/product_limits/libs/util.js";
/**
 * Base class for all product-limit workloads. Subclasses provide the work to
 * run (see PipelineWorkload) and may override scale()/collection()/result().
 */
export class Workload {
    // Size knob used by subclasses when generating fields/predicates/documents.
    scale() {
        return DEFAULT_SCALE;
    }
    // Name of the collection the workload runs against.
    collection() {
        return "coll0";
    }
    // Compares the actual pipeline output against this.result(dataset).
    // NOTE(review): sort() is called without a comparator, so result documents
    // all stringify to "[object Object]" and the (stable) sort leaves their
    // order effectively unchanged — presumably both sides are expected to
    // already be in a deterministic order; confirm if order-insensitive
    // comparison was intended.
    check(dataset, actualResult) {
        actualResult.sort();
        let expectedResult = this.result(dataset);
        expectedResult.sort();
        print("Comparison start ...");
        assert.eq(expectedResult, actualResult);
        print("Comparison complete.");
    }
    result(dataset) {
        // By default we assume the workload returns the complete dataset
        return dataset.data();
    }
}
export class PipelineWorkload extends Workload {
    /**
     * Runs this workload's aggregation pipeline against `db`, first attempting
     * an explain (unless the pipeline starts with $documents), then timing the
     * actual execution and checking the result via this.check().
     */
    runWorkload(dataset, _, db) {
        const coll = db.getCollection(this.collection());
        const pipeline = this.pipeline(dataset);
        printjsononeline(pipeline);

        // A pipeline that starts with $documents must be run db-level and
        // cannot be explained against a collection.
        const startsWithDocuments = pipeline[0].hasOwnProperty("$documents");
        if (!startsWithDocuments) {
            try {
                coll.explain("allPlansExecution").aggregate(pipeline);
            } catch (error) {
                /// Large explains() can not legitimately fit in a BSONObject
                printjsononeline(error.codeName);
                assert(error.code === ErrorCodes.BSONObjectTooLarge, error);
            }
        }

        const startTime = Date.now();
        const cursor = startsWithDocuments ? db.aggregate(pipeline) : coll.aggregate(pipeline);
        const actualResult = cursor.toArray();
        const duration = Date.now() - startTime;

        print(`${dataset.constructor.name}.${this.constructor.name} took ${duration} ms.`);
        this.check(dataset, actualResult);
        print("Pipeline execution complete.");
    }
}

View File

@ -0,0 +1,16 @@
/**
* Tests the limits of the product in various dimensions by generating degenerate queries and
* running them.
*
* @tags: [
* # Pipeline length is limited to 200 in Atlas
* simulate_atlas_proxy_incompatible,
* ]
*/
import {DATASETS} from "jstests/product_limits/libs/datasets.js";
for (const dataset of DATASETS) {
let ds = new dataset;
ds.runDataset();
}