0
0
mirror of https://github.com/mongodb/mongo.git synced 2024-11-21 12:39:08 +01:00

SERVER-94916 Develop core_sharding smoke tests for analyzeShardKey (#29220)

GitOrigin-RevId: 22d17da8b04b54bff51adea151482152fa903200
This commit is contained in:
Israel Hsu 2024-11-18 20:09:45 -05:00 committed by MongoDB Bot
parent b7c2aeebb0
commit b1c834dbca
9 changed files with 817 additions and 629 deletions

View File

@ -8,6 +8,7 @@ selector:
roots:
# Analyze shard key smoke tests
- jstests/core_sharding/analyze_shard_key/analyze_shard_key.js
- jstests/core_sharding/analyze_shard_key/cardinality_and_frequency.js
- jstests/core_sharding/analyze_shard_key/configure_query_analyzer_basic.js
- jstests/core_sharding/analyze_shard_key/configure_query_analyzer_cmd_validation.js
- jstests/core_sharding/analyze_shard_key/configure_query_analyzer_persistence.js

View File

@ -8,6 +8,7 @@
* ]
*/
import {
setParameterOpts,
testExistingShardedCollection,
testExistingUnshardedCollection,
testNonExistingCollection
@ -15,14 +16,6 @@ import {
const shardNames = db.adminCommand({listShards: 1}).shards.map(shard => shard._id);
const setParameterOpts = {
analyzeShardKeyNumRanges: 100
};
// The sampling-based initial split policy needs 10 samples per split point so
// 10 * analyzeShardKeyNumRanges is the minimum number of distinct shard key values that the
// collection must have for the command to not fail to generate split points.
const numDocs = 10 * setParameterOpts.analyzeShardKeyNumRanges;
{
const dbName = db.getName();
const mongos = db.getMongo();
@ -31,11 +24,11 @@ const numDocs = 10 * setParameterOpts.analyzeShardKeyNumRanges;
assert.commandWorked(db.adminCommand(setParamCmd));
const testCases = [{conn: mongos, isSupported: true, isMongos: true}];
testNonExistingCollection(dbName, testCases);
testExistingUnshardedCollection(dbName, mongos, testCases, numDocs);
testExistingUnshardedCollection(dbName, mongos, testCases);
if (shardNames.length < 2) {
print(jsTestName() +
" testExistingShardedCollection will not run; at least 2 shards are required.");
} else {
testExistingShardedCollection(dbName, mongos, testCases, numDocs);
testExistingShardedCollection(dbName, mongos, testCases);
}
}

View File

@ -0,0 +1,56 @@
/**
 * Tests that the analyzeShardKey command returns correct cardinality and frequency metrics when
 * no document sampling is involved.
 *
 * @tags: [
 * assumes_balancer_off,
 * does_not_support_stepdowns,
 * requires_fcv_70,
 * ]
 */
import {
numMostCommonValues
} from "jstests/sharding/analyze_shard_key/libs/cardinality_and_frequency_common.js";
import {
testAnalyzeCandidateShardKeysShardedCollection,
testAnalyzeCandidateShardKeysUnshardedCollection,
testAnalyzeCurrentShardKeys,
} from "jstests/sharding/analyze_shard_key/libs/cardinality_and_frequency_common_tests.js";
const mongos = db.getMongo();
const shardNames = db.adminCommand({listShards: 1}).shards.map(shard => shard._id);
// The sharded-collection test cases need a second shard to place chunks on, so this suite is
// skipped entirely on single-shard clusters.
if (shardNames.length < 2) {
print(jsTestName() + " will not run; at least 2 shards are required.");
quit();
}
// Get the number of nodes in a shard's replica set
const shardMap = db.adminCommand({getShardMap: 1});
let numNodesPerRS = 0;
for (const [key, value] of Object.entries(shardMap.map)) {
// Skip the config server entry; only data-bearing shards matter for the write concern below.
if (key !== "config") {
// 'value' is a connection string, presumably "rsName/host1,host2,..." — splitting on commas
// yields the replica set member count. TODO confirm the format.
const nodes = value.split(",").length;
if (numNodesPerRS == 0) {
numNodesPerRS = nodes;
} else {
// NOTE(review): this keeps the FIRST shard's node count and only asserts that no later
// shard is smaller — confirm all shards are expected to have the same number of nodes.
assert(nodes >= numNodesPerRS);
}
}
}
// The write concern to use when inserting documents into test collections. Waiting for the
// documents to get replicated to all nodes is necessary since mongos runs the analyzeShardKey
// command with readPreference "secondaryPreferred".
const writeConcern = {
w: numNodesPerRS
};
const setParameterOpts = {
analyzeShardKeyNumMostCommonValues: numMostCommonValues
};
// Align the server-side number of reported most-common values with what the helpers expect.
const setParamCmd = Object.assign({setParameter: 1}, setParameterOpts);
assert.commandWorked(db.adminCommand(setParamCmd));
testAnalyzeCandidateShardKeysUnshardedCollection(mongos, {}, writeConcern);
testAnalyzeCandidateShardKeysShardedCollection(mongos, null, writeConcern);
testAnalyzeCurrentShardKeys(mongos, null, writeConcern);

View File

@ -7,19 +7,13 @@ import {FixtureHelpers} from "jstests/libs/fixture_helpers.js";
import {ReplSetTest} from "jstests/libs/replsettest.js";
import {ShardingTest} from "jstests/libs/shardingtest.js";
import {
setParameterOpts,
testExistingShardedCollection,
testExistingUnshardedCollection,
testNonExistingCollection
} from "jstests/sharding/analyze_shard_key/libs/analyze_shard_key_common_tests.js";
const setParameterOpts = {
analyzeShardKeyNumRanges: 100
};
const dbNameBase = "testDb";
// The sampling-based initial split policy needs 10 samples per split point so
// 10 * analyzeShardKeyNumRanges is the minimum number of distinct shard key values that the
// collection must have for the command to not fail to generate split points.
const numDocs = 10 * setParameterOpts.analyzeShardKeyNumRanges;
function testNotSupportReadWriteConcern(writeConn, testCases) {
const dbName = dbNameBase;
@ -77,8 +71,8 @@ function testNotSupportReadWriteConcern(writeConn, testCases) {
});
testNonExistingCollection(dbNameBase, testCases);
testExistingUnshardedCollection(dbNameBase, st.s, testCases, numDocs);
testExistingShardedCollection(dbNameBase, st.s, testCases, numDocs);
testExistingUnshardedCollection(dbNameBase, st.s, testCases);
testExistingShardedCollection(dbNameBase, st.s, testCases);
testNotSupportReadWriteConcern(st.s, testCases);
st.stop();
@ -104,7 +98,7 @@ if (jsTestOptions().useAutoBootstrapProcedure) { // TODO: SERVER-80318 Remove t
testCases.push({conn: node, isSupported: true, isReplSetMongod: true});
});
testExistingUnshardedCollection(dbNameBase, primary, testCases, numDocs);
testExistingUnshardedCollection(dbNameBase, primary, testCases);
testNonExistingCollection(dbNameBase, testCases);
testNotSupportReadWriteConcern(primary, testCases);
@ -142,7 +136,7 @@ if (!TestData.auth) {
// The analyzeShardKey command is not supported on standalone mongod.
const testCases = [{conn: mongod, isSupported: false}];
testExistingUnshardedCollection(dbNameBase, mongod, testCases, numDocs);
testExistingUnshardedCollection(dbNameBase, mongod, testCases);
MongoRunner.stopMongod(mongod);
}

View File

@ -7,136 +7,13 @@
import {ReplSetTest} from "jstests/libs/replsettest.js";
import {ShardingTest} from "jstests/libs/shardingtest.js";
import {
AnalyzeShardKeyUtil
} from "jstests/sharding/analyze_shard_key/libs/analyze_shard_key_util.js";
import {
assertAggregateQueryPlans,
getMongodConns,
numMostCommonValues
} from "jstests/sharding/analyze_shard_key/libs/cardinality_and_frequency_common.js";
// Define base test cases. For each test case:
// - 'shardKey' is the shard key being analyzed.
// - 'indexKey' is the index that the collection has.
// - 'indexOptions' is the additional options for the index.
const shardKeyPrefixedIndexTestCases = [
// Test non-compound shard keys with a shard key index.
{shardKey: {a: 1}, indexKey: {a: 1}, expectMetrics: true},
{shardKey: {a: "hashed"}, indexKey: {a: "hashed"}, expectMetrics: true},
{shardKey: {"a.x": 1}, indexKey: {"a.x": 1}, expectMetrics: true},
{shardKey: {"a.x.y": 1}, indexKey: {"a.x.y": 1}, expectMetrics: true},
// Test compound shard keys with a shard key index.
{shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: 1}, expectMetrics: true},
{shardKey: {"a.x": 1, "b": "hashed"}, indexKey: {"a.x": 1, "b": "hashed"}, expectMetrics: true},
{shardKey: {"a.x.y": "hashed", b: 1}, indexKey: {"a.x.y": "hashed", b: 1}, expectMetrics: true},
// Test non-compound and compound shard keys with a shard key prefixed index.
{shardKey: {a: 1}, indexKey: {a: 1, b: 1}, expectMetrics: true},
{shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: 1, c: 1}, expectMetrics: true},
{shardKey: {"a.x": 1}, indexKey: {"a.x": 1, b: "hashed"}, expectMetrics: true},
{shardKey: {"a.x.y": "hashed"}, indexKey: {"a.x.y": "hashed", b: 1}, expectMetrics: true},
// Test shard keys with _id.
{shardKey: {_id: 1}, indexKey: {_id: 1}, expectMetrics: true},
{shardKey: {_id: 1, a: 1}, indexKey: {_id: 1, a: 1}, expectMetrics: true},
// Test shard key indexes with simple collation.
{
shardKey: {a: 1},
indexKey: {a: 1},
indexOptions: {collation: {locale: "simple"}},
expectMetrics: true
},
];
const compatibleIndexTestCases = [
// Test non-compound and compound shard keys with a compatible index.
{shardKey: {a: 1}, indexKey: {a: "hashed"}, expectMetrics: true},
{shardKey: {a: "hashed"}, indexKey: {a: 1}, expectMetrics: true},
{shardKey: {"a.x": 1, b: "hashed"}, indexKey: {"a.x": "hashed", b: 1}, expectMetrics: true},
{shardKey: {"a.x.y": "hashed", b: 1}, indexKey: {"a.x.y": 1, b: "hashed"}, expectMetrics: true},
{shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: "hashed", c: 1}, expectMetrics: true},
// Test shard keys with _id.
{shardKey: {_id: "hashed"}, indexKey: {_id: 1}, expectMetrics: true},
// Test shard key indexes with simple collation.
{
shardKey: {a: 1},
indexKey: {a: "hashed"},
indexOptions: {collation: {locale: "simple"}},
expectMetrics: true
},
];
const noIndexTestCases = [
// Test non-compound and compound shard keys without a shard key prefixed index or a compatible
// index.
{shardKey: {a: 1}, expectMetrics: false},
{shardKey: {a: 1, b: 1}, indexKey: {b: 1}, expectMetrics: false},
{shardKey: {a: 1, b: 1}, indexKey: {a: 1, c: 1}, expectMetrics: false},
{
shardKey: {a: 1},
indexKey: {a: 1},
indexOptions: {collation: {locale: "fr"}}, // non-simple collation.
expectMetrics: false
},
{
shardKey: {a: 1},
indexKey: {a: 1},
indexOptions: {sparse: true},
expectMetrics: false,
},
{
shardKey: {a: 1},
indexKey: {a: 1},
indexOptions: {partialFilterExpression: {a: {$gte: 1}}},
expectMetrics: false
},
];
// Construct test cases from the base test cases above.
const candidateKeyTestCases = [];
const currentKeyTestCases = [];
for (let testCaseBase of shardKeyPrefixedIndexTestCases) {
if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
testCase.indexOptions.unique = false;
testCase.expectUnique = false;
candidateKeyTestCases.push(testCase);
currentKeyTestCases.push(testCase);
}
if (!AnalyzeShardKeyUtil.isHashedKeyPattern(testCaseBase.indexKey)) {
// Hashed indexes cannot have a uniqueness constraint.
const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
testCase.indexOptions.unique = true;
testCase.expectUnique =
Object.keys(testCaseBase.shardKey).length == Object.keys(testCaseBase.indexKey).length;
candidateKeyTestCases.push(testCase);
currentKeyTestCases.push(testCase);
}
}
for (let testCaseBase of compatibleIndexTestCases) {
if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
testCase.indexOptions.unique = false;
testCase.expectUnique = false;
candidateKeyTestCases.push(testCase);
}
if (!AnalyzeShardKeyUtil.isHashedKeyPattern(testCaseBase.indexKey)) {
// Hashed indexes cannot have a uniqueness constraint.
const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
testCase.indexOptions.unique = true;
testCase.expectUnique =
Object.keys(testCaseBase.shardKey).length == Object.keys(testCaseBase.indexKey).length;
candidateKeyTestCases.push(testCase);
}
}
for (let testCaseBase of noIndexTestCases) {
// No metrics are expected for these test cases so there is no need to test with both non-unique
// and unique index.
const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
testCase.indexOptions.unique = false;
candidateKeyTestCases.push(testCase);
}
import {
testAnalyzeCandidateShardKeysShardedCollection,
testAnalyzeCandidateShardKeysUnshardedCollection,
testAnalyzeCurrentShardKeys,
} from "jstests/sharding/analyze_shard_key/libs/cardinality_and_frequency_common_tests.js";
const numNodesPerRS = 2;
@ -147,386 +24,6 @@ const writeConcern = {
w: numNodesPerRS
};
/**
 * Builds a document in which every field named in 'fieldNames' (dotted paths allowed) is set
 * to 'value'.
 */
function makeDocument(fieldNames, value) {
    const doc = {};
    for (const fieldName of fieldNames) {
        AnalyzeShardKeyUtil.setDottedField(doc, fieldName, value);
    }
    return doc;
}
/**
 * Tests the cardinality and frequency metrics for a shard key that either has a non-unique
 * supporting/compatible index or doesn't have a supporting/compatible index.
 */
function testAnalyzeShardKeyNoUniqueIndex(conn, dbName, collName, currentShardKey, testCase) {
    assert(!testCase.indexOptions.unique);

    const ns = dbName + "." + collName;
    const db = conn.getDB(dbName);
    const coll = db.getCollection(collName);

    const fieldNames = AnalyzeShardKeyUtil.getCombinedFieldNames(
        currentShardKey, testCase.shardKey, testCase.indexKey);
    const shardKeyContainsId = testCase.shardKey.hasOwnProperty("_id");
    const isUnique = false;

    // Builds the documents to insert and the key characteristics metrics that analyzeShardKey
    // is expected to return for a collection with 'numDistinctValues' distinct shard key values.
    const makeSubTestCase = (numDistinctValues) => {
        const docs = [];
        const mostCommonValues = [];

        const maxFrequency = shardKeyContainsId ? 1 : numDistinctValues;
        let sign = 1;
        for (let i = 1; i <= numDistinctValues; i++) {
            // Test with integer field half of time and object field half of the time.
            const val = sign * i;
            const doc = makeDocument(fieldNames, Math.random() > 0.5 ? val : {foo: val});

            // When the shard key contains _id, every value must be unique; otherwise give the
            // i-th value a frequency of i so the most common values are predictable.
            const frequency = shardKeyContainsId ? 1 : i;
            for (let j = 1; j <= frequency; j++) {
                docs.push(doc);
            }

            const isMostCommon = (maxFrequency - frequency) < numMostCommonValues;
            if (testCase.expectMetrics && isMostCommon) {
                mostCommonValues.push({
                    value: AnalyzeShardKeyUtil.extractShardKeyValueFromDocument(doc,
                                                                                testCase.shardKey),
                    frequency
                });
            }

            sign *= -1;
        }

        const metrics = {
            numDocs: docs.length,
            isUnique,
            numDistinctValues,
            mostCommonValues,
            numMostCommonValues
        };
        return [docs, metrics];
    };

    // Seeds the collection with 'numDistinctValues' distinct shard key values, runs
    // analyzeShardKey, verifies the metrics (or the expected failure), and empties the
    // collection again. Extracted to avoid triplicating this sequence below.
    const runSubTestCase = (numDistinctValues) => {
        const [docs, metrics] = makeSubTestCase(numDistinctValues);
        assert.commandWorked(coll.insert(docs, {writeConcern}));

        const res = conn.adminCommand({
            analyzeShardKey: ns,
            key: testCase.shardKey,
            comment: testCase.comment,
            // Skip calculating the read and write distribution metrics since they are not
            // needed by this test.
            readWriteDistribution: false
        });
        if (testCase.expectMetrics) {
            AnalyzeShardKeyUtil.assertKeyCharacteristicsMetrics(res.keyCharacteristics, metrics);
        } else {
            // Without a supporting/compatible index the command cannot compute the metrics.
            assert.commandFailedWithCode(res, ErrorCodes.IllegalOperation);
        }
        assert.commandWorked(coll.remove({}, {writeConcern}));
    };

    // Analyze the shard key while the collection has fewer than, exactly, and more than
    // 'numMostCommonValues' distinct shard key values.
    runSubTestCase(numMostCommonValues - 1);
    runSubTestCase(numMostCommonValues);
    runSubTestCase(numMostCommonValues * 25);
}
/**
 * Tests the cardinality and frequency metrics for a shard key that has a unique
 * supporting/compatible index.
 */
function testAnalyzeShardKeyUniqueIndex(conn, dbName, collName, currentShardKey, testCase) {
    assert(testCase.indexOptions.unique);
    assert(testCase.expectMetrics);

    const ns = dbName + "." + collName;
    const db = conn.getDB(dbName);
    const coll = db.getCollection(collName);

    const fieldNames = AnalyzeShardKeyUtil.getCombinedFieldNames(
        currentShardKey, testCase.shardKey, testCase.indexKey);
    const isUnique = testCase.expectUnique;

    // Builds the documents to insert and the expected metrics. With a unique index every shard
    // key value appears exactly once, so every value is a most-common value with frequency 1.
    const makeSubTestCase = (numDistinctValues) => {
        const docs = [];
        const mostCommonValues = [];

        let sign = 1;
        for (let i = 1; i <= numDistinctValues; i++) {
            // Test with integer field half of time and object field half of the time.
            const val = sign * i;
            const doc = makeDocument(fieldNames, Math.random() > 0.5 ? val : {foo: val});
            docs.push(doc);
            mostCommonValues.push({
                value: AnalyzeShardKeyUtil.extractShardKeyValueFromDocument(doc, testCase.shardKey),
                frequency: 1
            });
            sign *= -1;
        }

        const metrics = {
            numDocs: docs.length,
            isUnique,
            numDistinctValues,
            mostCommonValues,
            numMostCommonValues
        };
        return [docs, metrics];
    };

    // Seeds the collection with 'numDistinctValues' distinct shard key values, runs
    // analyzeShardKey, verifies the returned metrics, and empties the collection again.
    // Extracted to avoid triplicating this sequence below.
    const runSubTestCase = (numDistinctValues) => {
        const [docs, metrics] = makeSubTestCase(numDistinctValues);
        assert.commandWorked(coll.insert(docs, {writeConcern}));

        const res = assert.commandWorked(conn.adminCommand({
            analyzeShardKey: ns,
            key: testCase.shardKey,
            comment: testCase.comment,
            // Skip calculating the read and write distribution metrics since they are not
            // needed by this test.
            readWriteDistribution: false
        }));
        AnalyzeShardKeyUtil.assertKeyCharacteristicsMetrics(res.keyCharacteristics, metrics);
        assert.commandWorked(coll.remove({}, {writeConcern}));
    };

    // Analyze the shard key while the collection has fewer than, exactly, and more than
    // 'numMostCommonValues' distinct shard key values.
    runSubTestCase(numMostCommonValues - 1);
    runSubTestCase(numMostCommonValues);
    runSubTestCase(numMostCommonValues * 25);
}
/**
 * Runs every candidate shard key test case against an unsharded collection on the given
 * connection, verifying the cardinality/frequency metrics (or expected failures) per case.
 * Exactly one of 'rst'/'st' identifies the fixture whose mongods get profiled.
 */
function testAnalyzeCandidateShardKeysUnshardedCollection(conn, {rst, st}) {
const dbName = "testDb";
const collName = "testCollUnshardedCandidate";
const db = conn.getDB(dbName);
const coll = db.getCollection(collName);
const mongodConns = getMongodConns({rst, st});
jsTest.log(
`Testing candidate shard keys for an unsharded collection: ${tojson({dbName, collName})}`);
candidateKeyTestCases.forEach(testCaseBase => {
const testCase = Object.assign({}, testCaseBase);
// Used to identify the operations performed by the analyzeShardKey commands in this test
// case.
testCase.comment = UUID();
jsTest.log(`Testing metrics for ${tojson({dbName, collName, testCase})}`);
// Skip creating the index for {_id: 1} — presumably because that index always exists.
if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
assert.commandWorked(coll.createIndex(testCase.indexKey, testCase.indexOptions));
}
AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
if (testCase.indexOptions.unique) {
testAnalyzeShardKeyUniqueIndex(
conn, dbName, collName, null /* currentShardKey */, testCase);
} else {
testAnalyzeShardKeyNoUniqueIndex(
conn, dbName, collName, null /* currentShardKey */, testCase);
}
AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
assertAggregateQueryPlans(
mongodConns,
dbName,
collName,
testCase.comment,
// On a replica set, the analyzeShardKey command runs the aggregate commands locally,
// i.e. the commands do not go through the service entry point so do not get profiled.
testCase.expectMetrics && !rst /* expectEntries */);
// Drop the per-case index so it cannot affect the next test case.
if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
assert.commandWorked(coll.dropIndex(testCase.indexKey));
}
});
assert.commandWorked(db.dropDatabase());
}
/**
 * Runs candidate shard key test cases against a collection sharded on {skey: 1}, with one
 * chunk placed on each of two shards, verifying the metrics per candidate key.
 */
function testAnalyzeCandidateShardKeysShardedCollection(st) {
const dbName = "testDb";
const collName = "testCollShardedCandidate";
const ns = dbName + "." + collName;
const currentShardKey = {skey: 1};
const currentShardKeySplitPoint = {skey: 0};
const db = st.s.getDB(dbName);
const coll = db.getCollection(collName);
const mongodConns = getMongodConns({st});
jsTest.log(
`Testing candidate shard keys for a sharded collection: ${tojson({dbName, collName})}`);
// Shard the collection, split it at {skey: 0} and move one chunk to the second shard so the
// data spans both shards.
assert.commandWorked(st.s.adminCommand({enableSharding: dbName, primaryShard: st.shard0.name}));
assert.commandWorked(st.s.adminCommand({shardCollection: ns, key: currentShardKey}));
assert.commandWorked(st.s.adminCommand({split: ns, middle: currentShardKeySplitPoint}));
assert.commandWorked(st.s.adminCommand(
{moveChunk: ns, find: currentShardKeySplitPoint, to: st.shard1.shardName}));
candidateKeyTestCases.forEach(testCaseBase => {
// NOTE(review): this skips every test case whose indexKey is NOT exactly {_id: 1}, leaving
// only the _id-based cases. Confirm the condition is not meant to be inverted (i.e. skip
// the _id cases instead), since the unsharded variant runs all candidate cases.
if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
return;
}
const testCase = Object.assign({}, testCaseBase);
if (currentShardKey && testCase.indexOptions.unique) {
// It is illegal to create a unique index that doesn't have the shard key as a prefix.
assert(testCase.indexKey);
testCase.shardKey = Object.assign({}, currentShardKey, testCase.shardKey);
testCase.indexKey = Object.assign({}, currentShardKey, testCase.indexKey);
}
// Used to identify the operations performed by the analyzeShardKey commands in this test
// case.
testCase.comment = UUID();
jsTest.log(`Testing metrics for ${tojson({dbName, collName, currentShardKey, testCase})}`);
// Skip creating the index for {_id: 1} — presumably because that index always exists.
if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
assert.commandWorked(coll.createIndex(testCase.indexKey, testCase.indexOptions));
}
AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
if (testCase.indexOptions.unique) {
testAnalyzeShardKeyUniqueIndex(st.s, dbName, collName, currentShardKey, testCase);
} else {
testAnalyzeShardKeyNoUniqueIndex(st.s, dbName, collName, currentShardKey, testCase);
}
AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
assertAggregateQueryPlans(mongodConns,
dbName,
collName,
testCase.comment,
testCase.expectMetrics /* expectEntries */);
// Drop the per-case index so it cannot affect the next test case.
if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
assert.commandWorked(coll.dropIndex(testCase.indexKey));
}
});
assert.commandWorked(db.dropDatabase());
}
/**
 * Runs the current-shard-key test cases: shards a fresh collection per test case on the test
 * case's own shard key, spreads chunks across two shards where possible, and verifies the
 * metrics analyzeShardKey reports for the collection's current shard key.
 */
function testAnalyzeCurrentShardKeys(st) {
const dbName = "testDb";
const db = st.s.getDB(dbName);
const mongodConns = getMongodConns({st});
jsTest.log(`Testing current shard key for sharded collections: ${tojson({dbName})}`);
assert.commandWorked(st.s.adminCommand({enableSharding: dbName, primaryShard: st.shard0.name}));
// Each test case gets its own collection, named by an increasing suffix.
let testNum = 0;
currentKeyTestCases.forEach(testCaseBase => {
const testCase = Object.assign({}, testCaseBase);
// Used to identify the operations performed by the analyzeShardKey commands in this test
// case.
testCase.comment = UUID();
const collName = "testCollShardedCurrent-" + testNum++;
const ns = dbName + "." + collName;
const currentShardKey = testCase.shardKey;
const coll = st.s.getCollection(ns);
jsTest.log(`Testing metrics for ${tojson({dbName, collName, currentShardKey, testCase})}`);
// Skip creating the index for {_id: 1} — presumably because that index always exists.
if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
assert.commandWorked(coll.createIndex(testCase.indexKey, testCase.indexOptions));
}
assert.commandWorked(st.s.adminCommand({shardCollection: ns, key: currentShardKey}));
// For range-based (non-hashed) shard keys, manually split at an all-zero point and move one
// chunk so the data spans both shards. Hashed keys are skipped here — presumably because a
// literal middle point is only valid for range-based keys.
if (!AnalyzeShardKeyUtil.isHashedKeyPattern(currentShardKey)) {
let shardKeySplitPoint = {};
for (let fieldName in currentShardKey) {
shardKeySplitPoint[fieldName] = 0;
}
assert.commandWorked(st.s.adminCommand({split: ns, middle: shardKeySplitPoint}));
assert.commandWorked(st.s.adminCommand(
{moveChunk: ns, find: shardKeySplitPoint, to: st.shard1.shardName}));
}
AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
if (testCase.indexOptions.unique) {
testAnalyzeShardKeyUniqueIndex(st.s, dbName, collName, currentShardKey, testCase);
} else {
testAnalyzeShardKeyNoUniqueIndex(st.s, dbName, collName, currentShardKey, testCase);
}
AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
assertAggregateQueryPlans(mongodConns,
dbName,
collName,
testCase.comment,
testCase.expectMetrics /* expectEntries */);
});
assert.commandWorked(db.dropDatabase());
}
const setParameterOpts = {
analyzeShardKeyNumMostCommonValues: numMostCommonValues
};
@ -535,9 +32,9 @@ const setParameterOpts = {
const st =
new ShardingTest({shards: 2, rs: {nodes: numNodesPerRS, setParameter: setParameterOpts}});
testAnalyzeCandidateShardKeysUnshardedCollection(st.s, {st});
testAnalyzeCandidateShardKeysShardedCollection(st);
testAnalyzeCurrentShardKeys(st);
testAnalyzeCandidateShardKeysUnshardedCollection(st.s, {st}, writeConcern);
testAnalyzeCandidateShardKeysShardedCollection(st.s, st, writeConcern);
testAnalyzeCurrentShardKeys(st.s, st, writeConcern);
st.stop();
}
@ -548,7 +45,7 @@ if (!jsTestOptions().useAutoBootstrapProcedure) { // TODO: SERVER-80318 Remove
rst.startSet();
rst.initiate();
testAnalyzeCandidateShardKeysUnshardedCollection(rst.getPrimary(), {rst});
testAnalyzeCandidateShardKeysUnshardedCollection(rst.getPrimary(), {rst}, writeConcern);
rst.stopSet();
}
}

View File

@ -4,8 +4,20 @@
*/
import {FixtureHelpers} from "jstests/libs/fixture_helpers.js";
import {
AnalyzeShardKeyUtil
} from "jstests/sharding/analyze_shard_key/libs/analyze_shard_key_util.js";
import {getNonPrimaryShardName} from "jstests/sharding/libs/sharding_util.js";
export const setParameterOpts = {
analyzeShardKeyNumRanges: 100
};
// The sampling-based initial split policy needs 10 samples per split point so
// 10 * analyzeShardKeyNumRanges is the minimum number of distinct shard key values that the
// collection must have for the command to not fail to generate split points.
const numDocs = 10 * setParameterOpts.analyzeShardKeyNumRanges;
export function testNonExistingCollection(dbName, testCases) {
const collName = "testCollNonExisting";
const ns = dbName + "." + collName;
@ -25,7 +37,7 @@ export function testNonExistingCollection(dbName, testCases) {
});
}
export function testExistingUnshardedCollection(dbName, writeConn, testCases, numDocs) {
export function testExistingUnshardedCollection(dbName, writeConn, testCases) {
const collName = "testCollUnsharded";
const ns = dbName + "." + collName;
const db = writeConn.getDB(dbName);
@ -97,7 +109,7 @@ export function testExistingUnshardedCollection(dbName, writeConn, testCases, nu
});
}
export function testExistingShardedCollection(dbName, mongos, testCases, numDocs) {
export function testExistingShardedCollection(dbName, mongos, testCases) {
const collName = "testCollSharded";
const ns = dbName + "." + collName;
const db = mongos.getDB(dbName);

View File

@ -1,31 +1,31 @@
/**
* Utilities for testing the analyzeShardKey command.
*/
export var AnalyzeShardKeyUtil = (function() {
export var AnalyzeShardKeyUtil = {
/**
* Returns true if the given key pattern contains a hashed key.
*/
function isHashedKeyPattern(keyPattern) {
isHashedKeyPattern(keyPattern) {
for (let fieldName in keyPattern) {
if (keyPattern[fieldName] == "hashed") {
return true;
}
}
return false;
}
},
/**
* Returns true if the given key pattern is {_id: 1}.
*/
function isIdKeyPattern(keyPattern) {
isIdKeyPattern(keyPattern) {
// Exact BSON equality against {_id: 1}; compound or hashed _id patterns do not match.
return bsonWoCompare(keyPattern, {_id: 1}) == 0;
}
},
/**
* Returns a set of current shard key field names, candidate shard key field names and
* index key field names combined.
*/
function getCombinedFieldNames(currentShardKey, candidateShardKey, indexKey) {
getCombinedFieldNames(currentShardKey, candidateShardKey, indexKey) {
const fieldNames = new Set([]);
for (let fieldName in currentShardKey) {
fieldNames.add(fieldName);
@ -37,24 +37,24 @@ export var AnalyzeShardKeyUtil = (function() {
fieldNames.add(fieldName);
}
return fieldNames;
}
},
/**
* Returns the value for the given field.
*/
function getDottedField(doc, fieldName) {
getDottedField(doc, fieldName) {
let val = doc;
const fieldNames = fieldName.split(".");
for (let i = 0; i < fieldNames.length; i++) {
val = val[fieldNames[i]];
}
return val;
}
},
/**
* Sets the given field to the given value. The field name can be dotted.
*/
function setDottedField(doc, fieldName, val) {
setDottedField(doc, fieldName, val) {
let obj = doc;
const fieldNames = fieldName.split(".");
for (let i = 0; i < fieldNames.length; i++) {
@ -68,38 +68,38 @@ export var AnalyzeShardKeyUtil = (function() {
}
obj = obj[fieldName];
}
}
},
/**
* Extracts the shard key value from the given document.
*/
function extractShardKeyValueFromDocument(doc, shardKey) {
extractShardKeyValueFromDocument(doc, shardKey) {
const shardKeyValue = {};
for (let fieldName in shardKey) {
shardKeyValue[fieldName] = AnalyzeShardKeyUtil.getDottedField(doc, fieldName);
}
return shardKeyValue;
}
},
/**
* Returns a random integer between the given range (inclusive).
*/
function getRandInteger(min, max) {
getRandInteger(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
},
/**
* Returns a random element in the given array.
*/
function getRandomElement(arr) {
return arr[getRandInteger(0, arr.length - 1)];
}
getRandomElement(arr) {
// Uniformly random element; assumes 'arr' is non-empty — TODO confirm callers guarantee this.
return arr[this.getRandInteger(0, arr.length - 1)];
},
/**
* Returns the field name "<prefix>", "<prefix>.x" or "<prefix>.x.y" with roughly equal
* probability.
*/
function getRandomFieldName(prefix) {
getRandomFieldName(prefix) {
const prob = Math.random();
if (prob < 0.3) {
return prefix;
@ -107,73 +107,73 @@ export var AnalyzeShardKeyUtil = (function() {
return prefix + ".x";
}
return prefix + ".x.y";
}
},
/**
* Returns true if the collection is a clustered collection. Assumes that the collection
* exists.
*/
function isClusterCollection(conn, dbName, collName) {
isClusterCollection(conn, dbName, collName) {
const listCollectionRes = assert.commandWorked(
conn.getDB(dbName).runCommand({listCollections: 1, filter: {name: collName}}));
return listCollectionRes.cursor.firstBatch[0].options.hasOwnProperty("clusteredIndex");
}
},
/**
* Enables profiling of the given database on all the given mongods.
*/
function enableProfiler(mongodConns, dbName) {
enableProfiler(mongodConns, dbName) {
mongodConns.forEach(conn => {
assert.commandWorked(conn.getDB(dbName).setProfilingLevel(2));
});
}
},
/**
* Disables profiling of the given database on all the given mongods.
*/
function disableProfiler(mongodConns, dbName) {
disableProfiler(mongodConns, dbName) {
mongodConns.forEach(conn => {
assert.commandWorked(conn.getDB(dbName).setProfilingLevel(0));
});
}
},
function calculatePercentage(part, whole) {
calculatePercentage(part, whole) {
assert.gte(part, 0);
assert.gt(whole, 0);
assert.lte(part, whole);
return (part * 100.0 / whole);
}
},
/**
* Returns true if 'objs' contains 'obj'.
*/
function containsBSONObj(objs, obj) {
containsBSONObj(objs, obj) {
for (let tmpObj of objs) {
if (bsonWoCompare(obj, tmpObj) == 0) {
return true;
}
}
return false;
}
},
// The analyzeShardKey command rounds the percentages 10 decimal places. The epsilon is chosen
// to account for that.
function assertApprox(actual, expected, msg, epsilon = 1e-9) {
assertApprox(actual, expected, msg, epsilon = 1e-9) {
// Absolute-tolerance comparison; the command rounds percentages to 10 decimal places, so the
// default epsilon of 1e-9 absorbs that rounding.
return assert.lte(Math.abs(actual - expected), epsilon, {actual, expected, msg});
}
},
/**
* Asserts that the difference between 'actual' and 'expected' is less than 'maxPercentage' of
* 'expected'.
*/
function assertDiffPercentage(actual, expected, maxPercentage) {
assertDiffPercentage(actual, expected, maxPercentage) {
const actualPercentage = Math.abs(actual - expected) * 100 / expected;
assert.lt(actualPercentage,
maxPercentage,
tojson({actual, expected, maxPercentage, actualPercentage}));
}
},
function validateKeyCharacteristicsMetrics(metrics) {
validateKeyCharacteristicsMetrics(metrics) {
assert.gt(metrics.numDocsTotal, 0, metrics);
assert.gt(metrics.numDocsSampled, 0, metrics);
assert.gt(metrics.numDistinctValues, 0, metrics);
@ -211,13 +211,13 @@ export var AnalyzeShardKeyUtil = (function() {
assert.gte(Math.abs(coefficient), 0, metrics);
assert.lte(Math.abs(coefficient), 1, metrics);
}
}
},
function assertNotContainKeyCharacteristicsMetrics(res) {
assertNotContainKeyCharacteristicsMetrics(res) {
assert(!res.hasOwnProperty("keyCharacteristics"), res);
}
},
function assertContainKeyCharacteristicsMetrics(res) {
assertContainKeyCharacteristicsMetrics(res) {
assert(res.hasOwnProperty("keyCharacteristics"), res);
const metrics = res.keyCharacteristics;
assert(metrics.hasOwnProperty("numDocsTotal"), metrics);
@ -226,10 +226,10 @@ export var AnalyzeShardKeyUtil = (function() {
assert(metrics.hasOwnProperty("mostCommonValues"), metrics);
assert(metrics.hasOwnProperty("monotonicity"), metrics);
assert(metrics.hasOwnProperty("avgDocSizeBytes"), metrics);
validateKeyCharacteristicsMetrics(metrics);
}
this.validateKeyCharacteristicsMetrics(metrics);
},
function assertKeyCharacteristicsMetrics(actual, expected) {
assertKeyCharacteristicsMetrics(actual, expected) {
assert.eq(actual.numDocsTotal, expected.numDocs, {actual, expected});
assert.eq(actual.numDocsSampled, expected.numDocs, {actual, expected});
assert.eq(actual.isUnique, expected.isUnique, {actual, expected});
@ -250,7 +250,7 @@ export var AnalyzeShardKeyUtil = (function() {
});
// Verify that this shard key value is among the expected ones.
assert(containsBSONObj(expected.mostCommonValues, mostCommonValue), {
assert(this.containsBSONObj(expected.mostCommonValues, mostCommonValue), {
mostCommonValue,
actual: actual.mostCommonValues,
expected: expected.mostCommonValues
@ -260,9 +260,9 @@ export var AnalyzeShardKeyUtil = (function() {
assert(actual.hasOwnProperty("monotonicity"), {actual, expected});
assert(actual.hasOwnProperty("avgDocSizeBytes"), {actual, expected});
}
},
function validateReadDistributionMetrics(metrics) {
validateReadDistributionMetrics(metrics) {
if (metrics.sampleSize.total == 0) {
assert.eq(bsonWoCompare(
metrics,
@ -286,16 +286,16 @@ export var AnalyzeShardKeyUtil = (function() {
assert.lte(metrics[fieldName], 100);
}
}
assertApprox(metrics.percentageOfSingleShardReads +
metrics.percentageOfMultiShardReads +
metrics.percentageOfScatterGatherReads,
100,
metrics);
this.assertApprox(metrics.percentageOfSingleShardReads +
metrics.percentageOfMultiShardReads +
metrics.percentageOfScatterGatherReads,
100,
metrics);
assert.gt(metrics.numReadsByRange.length, 0);
}
}
},
function validateWriteDistributionMetrics(metrics) {
validateWriteDistributionMetrics(metrics) {
if (metrics.sampleSize.total == 0) {
assert.eq(
bsonWoCompare(metrics,
@ -322,28 +322,28 @@ export var AnalyzeShardKeyUtil = (function() {
assert.lte(metrics[fieldName], 100);
}
}
assertApprox(metrics.percentageOfSingleShardWrites +
metrics.percentageOfMultiShardWrites +
metrics.percentageOfScatterGatherWrites,
100,
metrics);
this.assertApprox(metrics.percentageOfSingleShardWrites +
metrics.percentageOfMultiShardWrites +
metrics.percentageOfScatterGatherWrites,
100,
metrics);
assert.gt(metrics.numWritesByRange.length, 0);
}
}
},
function assertNotContainReadWriteDistributionMetrics(res) {
assertNotContainReadWriteDistributionMetrics(res) {
assert(!res.hasOwnProperty("readDistribution"));
assert(!res.hasOwnProperty("writeDistribution"));
}
},
function assertContainReadWriteDistributionMetrics(res) {
assertContainReadWriteDistributionMetrics(res) {
assert(res.hasOwnProperty("readDistribution"));
assert(res.hasOwnProperty("writeDistribution"));
validateReadDistributionMetrics(res.readDistribution);
validateWriteDistributionMetrics(res.writeDistribution);
}
this.validateReadDistributionMetrics(res.readDistribution);
this.validateWriteDistributionMetrics(res.writeDistribution);
},
function validateSampledQueryDocument(doc) {
validateSampledQueryDocument(doc) {
const readCmdNames = new Set(["find", "aggregate", "count", "distinct"]);
assert(doc.hasOwnProperty("ns"), doc);
assert(doc.hasOwnProperty("collectionUuid"), doc);
@ -354,30 +354,5 @@ export var AnalyzeShardKeyUtil = (function() {
assert(doc.cmd.hasOwnProperty("filter"));
assert(doc.cmd.hasOwnProperty("collation"));
}
}
return {
isHashedKeyPattern,
isIdKeyPattern,
getCombinedFieldNames,
getDottedField,
setDottedField,
extractShardKeyValueFromDocument,
getRandInteger,
getRandomElement,
getRandomFieldName,
isClusterCollection,
enableProfiler,
disableProfiler,
calculatePercentage,
assertApprox,
assertDiffPercentage,
assertNotContainKeyCharacteristicsMetrics,
assertContainKeyCharacteristicsMetrics,
assertKeyCharacteristicsMetrics,
validateKeyCharacteristicsMetrics,
assertNotContainReadWriteDistributionMetrics,
assertContainReadWriteDistributionMetrics,
validateSampledQueryDocument
};
})();
},
};

View File

@ -0,0 +1,660 @@
/**
* Defines tests and test cases that are common to cardinality_and_frequency tests in
* sharding/ and core_sharding/.
*/
import {
AnalyzeShardKeyUtil
} from "jstests/sharding/analyze_shard_key/libs/analyze_shard_key_util.js";
import {
assertAggregateQueryPlans,
getMongodConns,
numMostCommonValues
} from "jstests/sharding/analyze_shard_key/libs/cardinality_and_frequency_common.js";
import {
getNonPrimaryShardName,
getPrimaryShardNameForDB
} from "jstests/sharding/libs/sharding_util.js";
// Define base test cases. For each test case:
// - 'shardKey' is the shard key being analyzed.
// - 'indexKey' is the index that the collection has.
// - 'indexOptions' is the additional options for the index.
// - 'expectMetrics' is whether the analyzeShardKey command is expected to return key
//   characteristics metrics for this shard key / index combination (it requires a shard key
//   prefixed index or a compatible index).

// Test cases where the index is the shard key itself or has the shard key as a prefix.
const shardKeyPrefixedIndexTestCases = [
    // Test non-compound shard keys with a shard key index.
    {shardKey: {a: 1}, indexKey: {a: 1}, expectMetrics: true},
    {shardKey: {a: "hashed"}, indexKey: {a: "hashed"}, expectMetrics: true},
    {shardKey: {"a.x": 1}, indexKey: {"a.x": 1}, expectMetrics: true},
    {shardKey: {"a.x.y": 1}, indexKey: {"a.x.y": 1}, expectMetrics: true},
    // Test compound shard keys with a shard key index.
    {shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: 1}, expectMetrics: true},
    {shardKey: {"a.x": 1, "b": "hashed"}, indexKey: {"a.x": 1, "b": "hashed"}, expectMetrics: true},
    {shardKey: {"a.x.y": "hashed", b: 1}, indexKey: {"a.x.y": "hashed", b: 1}, expectMetrics: true},
    // Test non-compound and compound shard keys with a shard key prefixed index.
    {shardKey: {a: 1}, indexKey: {a: 1, b: 1}, expectMetrics: true},
    {shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: 1, c: 1}, expectMetrics: true},
    {shardKey: {"a.x": 1}, indexKey: {"a.x": 1, b: "hashed"}, expectMetrics: true},
    {shardKey: {"a.x.y": "hashed"}, indexKey: {"a.x.y": "hashed", b: 1}, expectMetrics: true},
    // Test shard keys with _id.
    {shardKey: {_id: 1}, indexKey: {_id: 1}, expectMetrics: true},
    {shardKey: {_id: 1, a: 1}, indexKey: {_id: 1, a: 1}, expectMetrics: true},
    // Test shard key indexes with simple collation.
    {
        shardKey: {a: 1},
        indexKey: {a: 1},
        indexOptions: {collation: {locale: "simple"}},
        expectMetrics: true
    },
];

// Test cases where the index is not shard key prefixed but is still compatible (same fields,
// differing only in 1 vs "hashed" per field).
const compatibleIndexTestCases = [
    // Test non-compound and compound shard keys with a compatible index.
    {shardKey: {a: 1}, indexKey: {a: "hashed"}, expectMetrics: true},
    {shardKey: {a: "hashed"}, indexKey: {a: 1}, expectMetrics: true},
    {shardKey: {"a.x": 1, b: "hashed"}, indexKey: {"a.x": "hashed", b: 1}, expectMetrics: true},
    {shardKey: {"a.x.y": "hashed", b: 1}, indexKey: {"a.x.y": 1, b: "hashed"}, expectMetrics: true},
    {shardKey: {a: 1, b: 1}, indexKey: {a: 1, b: "hashed", c: 1}, expectMetrics: true},
    // Test shard keys with _id.
    {shardKey: {_id: "hashed"}, indexKey: {_id: 1}, expectMetrics: true},
    // Test shard key indexes with simple collation.
    {
        shardKey: {a: 1},
        indexKey: {a: "hashed"},
        indexOptions: {collation: {locale: "simple"}},
        expectMetrics: true
    },
];

// Test cases where no usable index exists for the shard key, so no key characteristics metrics
// are expected (non-simple collation, sparse, and partial indexes do not qualify).
const noIndexTestCases = [
    // Test non-compound and compound shard keys without a shard key prefixed index or a compatible
    // index.
    {shardKey: {a: 1}, expectMetrics: false},
    {shardKey: {a: 1, b: 1}, indexKey: {b: 1}, expectMetrics: false},
    {shardKey: {a: 1, b: 1}, indexKey: {a: 1, c: 1}, expectMetrics: false},
    {
        shardKey: {a: 1},
        indexKey: {a: 1},
        indexOptions: {collation: {locale: "fr"}}, // non-simple collation.
        expectMetrics: false
    },
    {
        shardKey: {a: 1},
        indexKey: {a: 1},
        indexOptions: {sparse: true},
        expectMetrics: false,
    },
    {
        shardKey: {a: 1},
        indexKey: {a: 1},
        indexOptions: {partialFilterExpression: {a: {$gte: 1}}},
        expectMetrics: false
    },
];
// Construct test cases from the base test cases above. Each base case is expanded into a
// non-unique-index variant and/or a unique-index variant:
// - 'candidateKeyTestCases' is used when analyzing a key that is not the current shard key.
// - 'currentKeyTestCases' is used when analyzing the collection's current shard key.
export const candidateKeyTestCases = [];
export const currentKeyTestCases = [];

for (let testCaseBase of shardKeyPrefixedIndexTestCases) {
    // Skip the non-unique variant for _id index patterns (only the unique variant below
    // applies to them).
    if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
        const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
        testCase.indexOptions.unique = false;
        testCase.expectUnique = false;
        candidateKeyTestCases.push(testCase);
        currentKeyTestCases.push(testCase);
    }
    if (!AnalyzeShardKeyUtil.isHashedKeyPattern(testCaseBase.indexKey)) {
        // Hashed indexes cannot have a uniqueness constraint.
        const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
        testCase.indexOptions.unique = true;
        // The shard key is only expected to be reported unique when the unique index is on
        // exactly the shard key fields (not a strict superset of them).
        testCase.expectUnique =
            Object.keys(testCaseBase.shardKey).length == Object.keys(testCaseBase.indexKey).length;
        candidateKeyTestCases.push(testCase);
        currentKeyTestCases.push(testCase);
    }
}
for (let testCaseBase of compatibleIndexTestCases) {
    // Compatible (non-prefixed) indexes are only valid candidates, never current shard keys,
    // so these variants are only appended to 'candidateKeyTestCases'.
    if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
        const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
        testCase.indexOptions.unique = false;
        testCase.expectUnique = false;
        candidateKeyTestCases.push(testCase);
    }
    if (!AnalyzeShardKeyUtil.isHashedKeyPattern(testCaseBase.indexKey)) {
        // Hashed indexes cannot have a uniqueness constraint.
        const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
        testCase.indexOptions.unique = true;
        testCase.expectUnique =
            Object.keys(testCaseBase.shardKey).length == Object.keys(testCaseBase.indexKey).length;
        candidateKeyTestCases.push(testCase);
    }
}
for (let testCaseBase of noIndexTestCases) {
    // No metrics are expected for these test cases so there is no need to test with both non-unique
    // and unique index.
    const testCase = Object.extend({indexOptions: {}}, testCaseBase, true /* deep */);
    testCase.indexOptions.unique = false;
    candidateKeyTestCases.push(testCase);
}
/**
 * Builds and returns a document in which every (possibly dotted) field name in 'fieldNames'
 * is set to 'value'.
 */
export function makeDocument(fieldNames, value) {
    const doc = {};
    for (const fieldName of fieldNames) {
        AnalyzeShardKeyUtil.setDottedField(doc, fieldName, value);
    }
    return doc;
}
/**
 * Tests the cardinality and frequency metrics for a shard key that either has a non-unique
 * supporting/compatible index or doesn't have a supporting/compatible index.
 *
 * Inserts documents with a controlled number of distinct shard key values, runs the
 * analyzeShardKey command, and verifies the returned key characteristics metrics (or that the
 * command fails with IllegalOperation when no metrics are expected), at three collection sizes
 * relative to 'numMostCommonValues'.
 */
function testAnalyzeShardKeyNoUniqueIndex(
    conn, dbName, collName, currentShardKey, testCase, writeConcern) {
    assert(!testCase.indexOptions.unique);

    const db = conn.getDB(dbName);
    const ns = dbName + "." + collName;
    const coll = db.getCollection(collName);

    const fieldNames = AnalyzeShardKeyUtil.getCombinedFieldNames(
        currentShardKey, testCase.shardKey, testCase.indexKey);
    const shardKeyContainsId = testCase.shardKey.hasOwnProperty("_id");
    const isUnique = false;

    // Generates 'numDistinctValues' distinct shard key values where the i-th value appears
    // with frequency i (or 1 everywhere when the shard key contains _id, since _id values
    // cannot repeat), and returns both the documents and the expected metrics for them.
    const makeSubTestCase = (numDistinctValues) => {
        const docs = [];
        const mostCommonValues = [];

        const maxFrequency = shardKeyContainsId ? 1 : numDistinctValues;
        let sign = 1;
        for (let i = 1; i <= numDistinctValues; i++) {
            // Test with integer field half of time and object field half of the time.
            const val = sign * i;
            const doc = makeDocument(fieldNames, Math.random() > 0.5 ? val : {foo: val});

            const frequency = shardKeyContainsId ? 1 : i;
            for (let j = 1; j <= frequency; j++) {
                docs.push(doc);
            }

            // Only the values within 'numMostCommonValues' of the maximum frequency are
            // expected to be reported as most common.
            const isMostCommon = (maxFrequency - frequency) < numMostCommonValues;
            if (testCase.expectMetrics && isMostCommon) {
                mostCommonValues.push({
                    value: AnalyzeShardKeyUtil.extractShardKeyValueFromDocument(doc,
                                                                                testCase.shardKey),
                    frequency
                });
            }

            sign *= -1;
        }

        const metrics = {
            numDocs: docs.length,
            isUnique,
            numDistinctValues,
            mostCommonValues,
            numMostCommonValues
        };
        return [docs, metrics];
    };

    // Analyze the shard key while the collection has fewer than, exactly, and more than
    // 'numMostCommonValues' distinct shard key values.
    for (const numDistinctValues of
             [numMostCommonValues - 1, numMostCommonValues, numMostCommonValues * 25]) {
        const [docs, metrics] = makeSubTestCase(numDistinctValues);
        assert.commandWorked(coll.insert(docs, {writeConcern}));

        jsTest.log("Testing metrics with non-unique index, numDistinctValues = " +
                   numDistinctValues);
        const res = conn.adminCommand({
            analyzeShardKey: ns,
            key: testCase.shardKey,
            comment: testCase.comment,
            // Skip calculating the read and write distribution metrics since they are not
            // needed by this test.
            readWriteDistribution: false
        });
        if (testCase.expectMetrics) {
            // Fail with the full command response if the command unexpectedly failed, instead
            // of an opaque assertion on a missing 'keyCharacteristics' field.
            assert.commandWorked(res);
            AnalyzeShardKeyUtil.assertKeyCharacteristicsMetrics(res.keyCharacteristics, metrics);
        } else {
            assert.commandFailedWithCode(res, ErrorCodes.IllegalOperation);
        }
        assert.commandWorked(coll.remove({}, {writeConcern}));
    }
}
/**
 * Tests the cardinality and frequency metrics for a shard key that has a unique
 * supporting/compatible index.
 *
 * Inserts documents with a controlled number of distinct shard key values (each appearing
 * exactly once, as required by the unique index), runs the analyzeShardKey command, and
 * verifies the returned key characteristics metrics, at three collection sizes relative to
 * 'numMostCommonValues'.
 */
function testAnalyzeShardKeyUniqueIndex(
    conn, dbName, collName, currentShardKey, testCase, writeConcern) {
    assert(testCase.indexOptions.unique);
    assert(testCase.expectMetrics);

    const db = conn.getDB(dbName);
    const ns = dbName + "." + collName;
    const coll = db.getCollection(collName);

    const fieldNames = AnalyzeShardKeyUtil.getCombinedFieldNames(
        currentShardKey, testCase.shardKey, testCase.indexKey);
    const isUnique = testCase.expectUnique;

    // Generates 'numDistinctValues' documents with distinct shard key values (frequency 1
    // each, so every value is among the most common ones) and returns both the documents and
    // the expected metrics for them.
    const makeSubTestCase = (numDistinctValues) => {
        const docs = [];
        const mostCommonValues = [];

        let sign = 1;
        for (let i = 1; i <= numDistinctValues; i++) {
            // Test with integer field half of time and object field half of the time.
            const val = sign * i;
            const doc = makeDocument(fieldNames, Math.random() > 0.5 ? val : {foo: val});
            docs.push(doc);
            mostCommonValues.push({
                value: AnalyzeShardKeyUtil.extractShardKeyValueFromDocument(doc, testCase.shardKey),
                frequency: 1
            });

            sign *= -1;
        }

        const metrics = {
            numDocs: docs.length,
            isUnique,
            numDistinctValues,
            mostCommonValues,
            numMostCommonValues
        };
        return [docs, metrics];
    };

    // Analyze the shard key while the collection has fewer than, exactly, and more than
    // 'numMostCommonValues' distinct shard key values.
    for (const numDistinctValues of
             [numMostCommonValues - 1, numMostCommonValues, numMostCommonValues * 25]) {
        const [docs, metrics] = makeSubTestCase(numDistinctValues);
        assert.commandWorked(coll.insert(docs, {writeConcern}));

        // (The original messages here said "non-unique index" due to a copy-paste from the
        // sibling function; this function always tests a unique index.)
        jsTest.log("Testing metrics with unique index, numDistinctValues = " + numDistinctValues);
        const res = assert.commandWorked(conn.adminCommand({
            analyzeShardKey: ns,
            key: testCase.shardKey,
            comment: testCase.comment,
            // Skip calculating the read and write distribution metrics since they are not
            // needed by this test.
            readWriteDistribution: false
        }));
        AnalyzeShardKeyUtil.assertKeyCharacteristicsMetrics(res.keyCharacteristics, metrics);
        assert.commandWorked(coll.remove({}, {writeConcern}));
    }
}
/**
 * Runs through all candidate-key test cases to test shard key metrics with a supporting unique
 * or non-unique index on an unsharded collection.
 *
 * @param conn Connection to run commands against: a mongos for ShardingTest-based callers, a
 *     db connection for Python-fixture (core_sharding passthrough) callers.
 * @param {rst, st} ShardingTest-based callers pass in either a replica set ('rst') or a
 *     sharded cluster ('st') object; Python-fixture callers pass in an empty object, in which
 *     case profiler-based plan assertions are skipped.
 * @param writeConcern Write concern to use for the test inserts.
 */
export function testAnalyzeCandidateShardKeysUnshardedCollection(conn, {rst, st}, writeConcern) {
    const dbName = jsTestName() + "db";
    const db = conn.getDB(dbName);
    const collName = "testCollUnshardedCandidate";
    const coll = db.getCollection(collName);

    // Direct mongod connections (for profiling) are only available from JS fixtures. Note:
    // this previously read '(rst && st)', which is never true since callers pass exactly one
    // of the two, so the profiler assertions below were silently never exercised.
    const mongodConns = (rst || st) ? getMongodConns({rst, st}) : null;

    jsTest.log(
        `Testing candidate shard keys for an unsharded collection: ${tojson({dbName, collName})}`);

    candidateKeyTestCases.forEach(testCaseBase => {
        const testCase = Object.assign({}, testCaseBase);
        // Used to identify the operations performed by the analyzeShardKey commands in this test
        // case.
        testCase.comment = UUID();
        jsTest.log(`Testing metrics for ${tojson({dbName, collName, testCase})}`);

        // Some suites automatically create indexes that conflict with some test cases here;
        // if index creation fails, skip this test case.
        let skipTestCase = false;
        if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
            const result = coll.createIndex(testCase.indexKey, testCase.indexOptions);
            if (!result.ok && result.code === ErrorCodes.CannotCreateIndex) {
                jsTest.log(
                    "Skipping testAnalyzeCandidateShardKeysUnshardedCollection test case because CannotCreateIndex: " +
                    result.errmsg);
                skipTestCase = true;
            } else {
                assert.commandWorked(result);
            }
        }
        if (!skipTestCase) {
            if (mongodConns) {
                AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
            }
            if (testCase.indexOptions.unique) {
                testAnalyzeShardKeyUniqueIndex(
                    conn, dbName, collName, null /* currentShardKey */, testCase, writeConcern);
            } else {
                testAnalyzeShardKeyNoUniqueIndex(
                    conn, dbName, collName, null /* currentShardKey */, testCase, writeConcern);
            }
            if (mongodConns) {
                AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
                assertAggregateQueryPlans(mongodConns,
                                          dbName,
                                          collName,
                                          testCase.comment,
                                          // On a replica set, the analyzeShardKey command runs the
                                          // aggregate commands locally, i.e. the commands do not go
                                          // through the service entry point so do not get profiled.
                                          testCase.expectMetrics && !rst /* expectEntries */);
            }
            // Drop the per-case index so it does not interfere with the next test case.
            if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
                assert.commandWorked(coll.dropIndex(testCase.indexKey));
            }
        }
    });

    assert.commandWorked(db.dropDatabase());
}
/**
 * Runs through the candidate-key test cases to test shard key metrics with a supporting unique
 * or non-unique index on a sharded collection (sharded on {skey: 1} with one chunk moved off
 * the primary shard).
 *
 * @param conn Connection to run commands against: a mongos for ShardingTest-based callers, a
 *     db connection for Python-fixture (core_sharding passthrough) callers.
 * @param st ShardingTest-based callers pass in the ShardingTest object; Python-fixture callers
 *     pass in nothing, in which case profiler-based plan assertions are skipped.
 * @param writeConcern Write concern to use for the test inserts.
 */
export function testAnalyzeCandidateShardKeysShardedCollection(conn, st, writeConcern) {
    const dbName = jsTestName() + "db";
    const db = conn.getDB(dbName);
    const collName = "testCollShardedCandidate";
    const ns = dbName + "." + collName;
    const currentShardKey = {skey: 1};
    const currentShardKeySplitPoint = {skey: 0};
    const coll = db.getCollection(collName);
    const mongodConns = (st) ? getMongodConns({st}) : null;

    // Make sure the database exists by inserting a document.
    {
        const initialcoll = db["initialCollection"];
        assert.commandWorked(initialcoll.insert([{a: 1}], {writeConcern}));
    }

    jsTest.log(
        `Testing candidate shard keys for a sharded collection: ${tojson({dbName, collName})}`);

    const primaryShardName = getPrimaryShardNameForDB(db);
    const nonPrimaryShard = getNonPrimaryShardName(db);
    assert.commandWorked(
        conn.adminCommand({enableSharding: dbName, primaryShard: primaryShardName}));
    const result = conn.adminCommand({shardCollection: ns, key: currentShardKey});
    // Some suites automatically create indexes that prevent the collection from being
    // sharded with the specified shard key.
    if (!result.ok && result.code == ErrorCodes.AlreadyInitialized) {
        jsTest.log(
            "Skipping testAnalyzeCandidateShardKeysShardedCollection because AlreadyInitialized: " +
            result.errmsg);
        return;
    }
    // Distribute the data across two shards so the analyzed collection is genuinely sharded.
    assert.commandWorked(conn.adminCommand({split: ns, middle: currentShardKeySplitPoint}));
    assert.commandWorked(
        conn.adminCommand({moveChunk: ns, find: currentShardKeySplitPoint, to: nonPrimaryShard}));

    candidateKeyTestCases.forEach(testCaseBase => {
        // NOTE(review): this early return keeps only the test cases whose 'indexKey' is an _id
        // pattern and drops all others — confirm this filter (rather than its inverse) is the
        // intended selection for the sharded-collection smoke test.
        if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCaseBase.indexKey)) {
            return;
        }
        const testCase = Object.assign({}, testCaseBase);
        if (currentShardKey && testCase.indexOptions.unique) {
            // It is illegal to create a unique index that doesn't have the shard key as a prefix.
            assert(testCase.indexKey);
            testCase.shardKey = Object.assign({}, currentShardKey, testCase.shardKey);
            testCase.indexKey = Object.assign({}, currentShardKey, testCase.indexKey);
        }
        // Used to identify the operations performed by the analyzeShardKey commands in this test
        // case.
        testCase.comment = UUID();
        jsTest.log(`Testing metrics for ${tojson({dbName, collName, currentShardKey, testCase})}`);

        // Some suites automatically create indexes that conflict with some test cases here;
        // if index creation fails, skip this test case.
        let skipTestCase = false;
        if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
            const result = coll.createIndex(testCase.indexKey, testCase.indexOptions);
            if (!result.ok && result.code === ErrorCodes.CannotCreateIndex) {
                jsTest.log(
                    "Skipping testAnalyzeCandidateShardKeysShardedCollection test case because CannotCreateIndex: " +
                    result.errmsg);
                skipTestCase = true;
            } else {
                assert.commandWorked(result);
            }
        }
        if (!skipTestCase) {
            if (mongodConns) {
                AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
            }
            if (testCase.indexOptions.unique) {
                testAnalyzeShardKeyUniqueIndex(
                    conn, dbName, collName, currentShardKey, testCase, writeConcern);
            } else {
                testAnalyzeShardKeyNoUniqueIndex(
                    conn, dbName, collName, currentShardKey, testCase, writeConcern);
            }
            if (mongodConns) {
                AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
                assertAggregateQueryPlans(mongodConns,
                                          dbName,
                                          collName,
                                          testCase.comment,
                                          testCase.expectMetrics /* expectEntries */);
            }
            // Drop the per-case index so it does not interfere with the next test case.
            if (testCase.indexKey && !AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
                assert.commandWorked(coll.dropIndex(testCase.indexKey));
            }
        }
    });

    assert.commandWorked(db.dropDatabase());
}
/**
 * Runs through all current-key test cases to test shard key metrics with a supporting unique
 * or non-unique index on an existing shard key of a sharded collection. Each test case shards
 * a fresh collection on the shard key under test.
 *
 * @param conn Connection to run commands against: a mongos for ShardingTest-based callers, a
 *     db connection for Python-fixture (core_sharding passthrough) callers.
 * @param st ShardingTest-based callers pass in the ShardingTest object; Python-fixture callers
 *     pass in nothing, in which case profiler-based plan assertions are skipped.
 * @param writeConcern Write concern to use for the test inserts.
 */
export function testAnalyzeCurrentShardKeys(conn, st, writeConcern) {
    const dbName = jsTestName() + "db";
    const db = conn.getDB(dbName);
    const mongodConns = st ? getMongodConns({st}) : null;

    // Make sure the database exists by inserting a document.
    {
        const initialcoll = db["initialCollection"];
        assert.commandWorked(initialcoll.insert([{a: 1}], {writeConcern}));
    }

    jsTest.log(`Testing current shard key for sharded collections: ${tojson({dbName})}`);

    const primaryShardName = getPrimaryShardNameForDB(db);
    const nonPrimaryShard = getNonPrimaryShardName(db);
    assert.commandWorked(db.adminCommand({enableSharding: dbName, primaryShard: primaryShardName}));

    // Each test case gets its own collection so the shard key can differ per case.
    let testNum = 0;
    currentKeyTestCases.forEach(testCaseBase => {
        const testCase = Object.assign({}, testCaseBase);
        // Used to identify the operations performed by the analyzeShardKey commands in this test
        // case.
        testCase.comment = UUID();

        const collName = "testCollShardedCurrent-" + testNum++;
        const ns = dbName + "." + collName;
        const currentShardKey = testCase.shardKey;
        const coll = db.getCollection(collName);

        jsTest.log("Testing metrics for " + tojson({dbName, collName, currentShardKey, testCase}));

        // Some suites automatically create indexes that conflict with some test cases here;
        // if index creation fails, skip this test case.
        let skipTestCase = false;
        if (!AnalyzeShardKeyUtil.isIdKeyPattern(testCase.indexKey)) {
            const result = coll.createIndex(testCase.indexKey, testCase.indexOptions);
            if (!result.ok && result.code == ErrorCodes.CannotCreateIndex) {
                jsTest.log(
                    "Skipping testAnalyzeCurrentShardKeys test case because CannotCreateIndex: " +
                    result.errmsg);
                skipTestCase = true;
            } else {
                assert.commandWorked(result);
            }
        }

        // TODO SERVER-97056 Remove the condition !testCase.indexOptions.unique for skipping this
        // test case.
        if (!skipTestCase && !testCase.indexOptions.unique) {
            const result = db.adminCommand({shardCollection: ns, key: currentShardKey});
            // Some suites automatically create indexes that prevent the collection from being
            // sharded with the specified shard key.
            if (!result.ok && result.code == ErrorCodes.AlreadyInitialized) {
                jsTest.log(
                    "Skipping testAnalyzeCurrentShardKeys test case because AlreadyInitialized: " +
                    result.errmsg);
            } else {
                assert.commandWorked(result);

                // Hashed shard keys cannot be split at a literal middle point, so only spread
                // range-based shard keys across two shards.
                if (!AnalyzeShardKeyUtil.isHashedKeyPattern(currentShardKey)) {
                    let shardKeySplitPoint = {};
                    for (let fieldName in currentShardKey) {
                        shardKeySplitPoint[fieldName] = 0;
                    }
                    assert.commandWorked(db.adminCommand({split: ns, middle: shardKeySplitPoint}));
                    assert.commandWorked(db.adminCommand(
                        {moveChunk: ns, find: shardKeySplitPoint, to: nonPrimaryShard}));
                }

                if (mongodConns) {
                    AnalyzeShardKeyUtil.enableProfiler(mongodConns, dbName);
                }
                // While the SERVER-97056 guard above is in place, the unique branch here is
                // unreachable; it is kept for when that guard is removed.
                if (testCase.indexOptions.unique) {
                    testAnalyzeShardKeyUniqueIndex(
                        conn, dbName, collName, currentShardKey, testCase, writeConcern);
                } else {
                    testAnalyzeShardKeyNoUniqueIndex(
                        conn, dbName, collName, currentShardKey, testCase, writeConcern);
                }
                if (mongodConns) {
                    AnalyzeShardKeyUtil.disableProfiler(mongodConns, dbName);
                    assertAggregateQueryPlans(mongodConns,
                                              dbName,
                                              collName,
                                              testCase.comment,
                                              testCase.expectMetrics /* expectEntries */);
                }
            }
        }
    });

    assert.commandWorked(db.dropDatabase());
}

View File

@ -10,7 +10,7 @@ export function getShardNames(db) {
* Finds the _id of the primary shard for database 'dbname', e.g., 'test-rs0'
*/
export function getPrimaryShardIdForDatabase(conn, dbname) {
var x = conn.getDB("config").databases.findOne({_id: "" + dbname});
var x = conn.getDB("config").databases.findOne({_id: dbname});
if (x) {
return x.primary;
}