diff --git a/jstests/aggregation/expressions/date_from_parts.js b/jstests/aggregation/expressions/date_from_parts.js index cd16fe6af49..1f53aebc193 100644 --- a/jstests/aggregation/expressions/date_from_parts.js +++ b/jstests/aggregation/expressions/date_from_parts.js @@ -558,7 +558,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleA"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.DurationOverflow, @@ -566,7 +566,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128A"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.DurationOverflow, @@ -574,11 +574,13 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDoubleB"}}}}]; - assertErrMsgContains(coll, pipeline, 40515, "'millisecond' must evaluate to an integer"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 40515, "'millisecond' must evaluate to an integer"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, millisecond: "$veryBigDecimal128B"}}}}]; - assertErrMsgContains(coll, pipeline, 40515, "'millisecond' must evaluate to an integer"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 40515, "'millisecond' must evaluate to an integer"); /* --------------------------------------------------------------------------------------- */ /* Testing that year values are only allowed in the range [0, 9999] and that month, day, hour, @@ -594,92 +596,94 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE }])); pipeline = [{$project: {date: {"$dateFromParts": {year: "$bigYear"}}}}]; - assertErrMsgContains( + 
assertErrCodeAndErrMsgContains( coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999"); pipeline = [{$project: {date: {"$dateFromParts": {year: "$smallYear"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 40523, "'year' must evaluate to an integer in the range 0 to 9999"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: "$prettyBigNegativeInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'month' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{ $project: {date: {"$dateFromParts": {year: 1970, month: 1, day: "$prettyBigNegativeInt"}}} }]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'day' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: "$prettyBigNegativeInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'hour' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'minute' must 
evaluate to a value in the range [-32768, 32767]"); pipeline = [{ $project: {date: {"$dateFromParts": {year: 1970, hour: 0, minute: "$prettyBigNegativeInt"}}} }]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'minute' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$bigYear"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999"); pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: "$smallYear"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31095, "'isoWeekYear' must evaluate to an integer in the range 0 to 9999"); pipeline = [{$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigInt"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{ $project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoWeek: "$prettyBigNegativeInt"}}} }]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 31034, "'isoWeek' must evaluate to a value in the range [-32768, 32767]"); pipeline = [ {$project: {date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigInt"}}}} ]; - assertErrMsgContains(coll, - pipeline, - 31034, - "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]"); + assertErrCodeAndErrMsgContains( + coll, + pipeline, + 31034, + "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]"); pipeline = [{ $project: { date: {"$dateFromParts": {isoWeekYear: 1970, isoDayOfWeek: "$prettyBigNegativeInt"}} } }]; - assertErrMsgContains(coll, - pipeline, - 31034, - "'isoDayOfWeek' must evaluate to a value in the range [-32768, 32767]"); + assertErrCodeAndErrMsgContains( + coll, + pipeline, + 31034, + "'isoDayOfWeek' must evaluate to a value 
in the range [-32768, 32767]"); /* --------------------------------------------------------------------------------------- */ /* Testing wrong arguments */ diff --git a/jstests/aggregation/expressions/date_from_string.js b/jstests/aggregation/expressions/date_from_string.js index e62b4f9f392..3e905d9fbfe 100644 --- a/jstests/aggregation/expressions/date_from_string.js +++ b/jstests/aggregation/expressions/date_from_string.js @@ -593,10 +593,10 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE ]; pipelines.forEach(function(pipeline) { - assertErrMsgContains(coll, - pipeline, - ErrorCodes.ConversionFailure, - "an incomplete date/time string has been found"); + assertErrCodeAndErrMsgContains(coll, + pipeline, + ErrorCodes.ConversionFailure, + "an incomplete date/time string has been found"); }); /* --------------------------------------------------------------------------------------- */ @@ -614,7 +614,7 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE ]; pipelines.forEach(function(pipeline) { - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.ConversionFailure, "Error parsing date string"); }); @@ -666,17 +666,17 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE /* Parse errors. 
*/ let pipeline = [{$project: {date: {$dateFromString: "no-object"}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 40540, "$dateFromString only supports an object as an argument"); pipeline = [{$project: {date: {$dateFromString: {"unknown": "$tz"}}}}]; - assertErrMsgContains(coll, pipeline, 40541, "Unrecognized argument"); + assertErrCodeAndErrMsgContains(coll, pipeline, 40541, "Unrecognized argument"); pipeline = [{$project: {date: {$dateFromString: {dateString: 5}}}}]; - assertErrMsgContains(coll, - pipeline, - ErrorCodes.ConversionFailure, - "$dateFromString requires that 'dateString' be a string"); + assertErrCodeAndErrMsgContains(coll, + pipeline, + ErrorCodes.ConversionFailure, + "$dateFromString requires that 'dateString' be a string"); /* --------------------------------------------------------------------------------------- */ /* Passing in time zone with date/time string. */ @@ -723,17 +723,18 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE // Test umatched format specifier string. pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y-%m-%d"}}}}]; - assertErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing"); + assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing"); pipeline = [{$project: {date: {$dateFromString: {dateString: "2018-01", format: "%Y"}}}}]; - assertErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data"); + assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data"); // Test missing specifier prefix '%'. 
pipeline = [{$project: {date: {$dateFromString: {dateString: "1992-26-04", format: "Y-d-m"}}}}]; - assertErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Format literal not found"); + assertErrCodeAndErrMsgContains( + coll, pipeline, ErrorCodes.ConversionFailure, "Format literal not found"); pipeline = [{$project: {date: {$dateFromString: {dateString: "1992", format: "%n"}}}}]; - assertErrMsgContains(coll, pipeline, 18536, "Invalid format character"); + assertErrCodeAndErrMsgContains(coll, pipeline, 18536, "Invalid format character"); pipeline = [{ $project: { @@ -743,28 +744,28 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE } } }]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.ConversionFailure, "you cannot pass in a date/time string with GMT offset together with a timezone argument"); pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: 5}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 40684, "$dateFromString requires that 'format' be a string"); pipeline = [{$project: {date: {$dateFromString: {dateString: "4/26/1992", format: {}}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, 40684, "$dateFromString requires that 'format' be a string"); pipeline = [{$project: {date: {$dateFromString: {dateString: "ISO Day 6", format: "ISO Day %u"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid"); pipeline = [{$project: {date: {$dateFromString: {dateString: "ISO Week 52", format: "ISO Week %V"}}}}]; - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, pipeline, ErrorCodes.ConversionFailure, "The parsed date was invalid"); pipeline = [{ @@ -772,24 +773,24 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode and assertE date: {$dateFromString: {dateString: "ISO Week 1, 2018", format: 
"ISO Week %V, %Y"}} } }]; - assertErrMsgContains(coll, - pipeline, - ErrorCodes.ConversionFailure, - "Mixing of ISO dates with natural dates is not allowed"); + assertErrCodeAndErrMsgContains(coll, + pipeline, + ErrorCodes.ConversionFailure, + "Mixing of ISO dates with natural dates is not allowed"); pipeline = [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d/%G"}}}}]; - assertErrMsgContains(coll, - pipeline, - ErrorCodes.ConversionFailure, - "Mixing of ISO dates with natural dates is not allowed"); + assertErrCodeAndErrMsgContains(coll, + pipeline, + ErrorCodes.ConversionFailure, + "Mixing of ISO dates with natural dates is not allowed"); // Test embedded null bytes in the 'dateString' and 'format' fields. pipeline = [{$project: {date: {$dateFromString: {dateString: "12/31\0/2018", format: "%m/%d/%Y"}}}}]; - assertErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing"); + assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Data missing"); pipeline = [{$project: {date: {$dateFromString: {dateString: "12/31/2018", format: "%m/%d\0/%Y"}}}}]; - assertErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data"); + assertErrCodeAndErrMsgContains(coll, pipeline, ErrorCodes.ConversionFailure, "Trailing data"); })(); diff --git a/jstests/aggregation/expressions/date_from_string_on_error.js b/jstests/aggregation/expressions/date_from_string_on_error.js index ba0ce0fa573..2947c8ed35f 100644 --- a/jstests/aggregation/expressions/date_from_string_on_error.js +++ b/jstests/aggregation/expressions/date_from_string_on_error.js @@ -166,7 +166,7 @@ .toArray()); // Test that 'onError' is ignored when the 'format' is invalid. 
- assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, [{ $project: { @@ -178,7 +178,7 @@ 40684, "$dateFromString requires that 'format' be a string"); - assertErrMsgContains( + assertErrCodeAndErrMsgContains( coll, [{ $project: { diff --git a/jstests/aggregation/expressions/date_to_string.js b/jstests/aggregation/expressions/date_to_string.js index dad8775a3e8..a1cbaa83fd9 100644 --- a/jstests/aggregation/expressions/date_to_string.js +++ b/jstests/aggregation/expressions/date_to_string.js @@ -239,11 +239,13 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode $project: {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: 5}}} }]; - assertErrMsgContains(coll, pipeline, 18533, "$dateToString requires that 'format' be a string"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 18533, "$dateToString requires that 'format' be a string"); pipeline = [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", timezone: "$tz"}}}}]; - assertErrMsgContains(coll, pipeline, 18628, "Missing 'date' parameter to $dateToString"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 18628, "Missing 'date' parameter to $dateToString"); pipeline = [{ $project: { @@ -256,10 +258,11 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } } }]; - assertErrMsgContains(coll, pipeline, 40517, "timezone must evaluate to a string"); + assertErrCodeAndErrMsgContains(coll, pipeline, 40517, "timezone must evaluate to a string"); pipeline = [{$project: {date: {$dateToString: {format: "%Y-%m-%d %H:%M:%S", date: 42}}}}]; - assertErrMsgContains(coll, pipeline, 16006, "can't convert from BSON type double to Date"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 16006, "can't convert from BSON type double to Date"); pipeline = [{ $project: { @@ -272,13 +275,13 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode } } }]; - assertErrMsgContains(coll, pipeline, 40485, "unrecognized time zone 
identifier"); + assertErrCodeAndErrMsgContains(coll, pipeline, 40485, "unrecognized time zone identifier"); pipeline = [{ $project: {date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%"}}} }]; - assertErrMsgContains(coll, pipeline, 18535, "Unmatched '%' at end of format string"); + assertErrCodeAndErrMsgContains(coll, pipeline, 18535, "Unmatched '%' at end of format string"); // Fails for unknown format specifier. pipeline = [{ @@ -286,5 +289,6 @@ load("jstests/aggregation/extras/utils.js"); // For assertErrorCode date: {$dateToString: {date: new ISODate("2017-01-04T15:08:51.911Z"), format: "%n"}} } }]; - assertErrMsgContains(coll, pipeline, 18536, "Invalid format character '%n' in format string"); + assertErrCodeAndErrMsgContains( + coll, pipeline, 18536, "Invalid format character '%n' in format string"); })(); diff --git a/jstests/aggregation/extras/utils.js b/jstests/aggregation/extras/utils.js index 3a8bbf5d071..57f61956792 100644 --- a/jstests/aggregation/extras/utils.js +++ b/jstests/aggregation/extras/utils.js @@ -269,7 +269,7 @@ function assertErrorCode(coll, pipe, code, errmsg, options = {}) { * Assert that an aggregation fails with a specific code and the error message contains the given * string. */ -function assertErrMsgContains(coll, pipe, code, expectedMessage) { +function assertErrCodeAndErrMsgContains(coll, pipe, code, expectedMessage) { const response = assert.commandFailedWithCode( coll.getDB().runCommand({aggregate: coll.getName(), pipeline: pipe, cursor: {}}), code); assert.neq( @@ -278,6 +278,31 @@ function assertErrMsgContains(coll, pipe, code, expectedMessage) { "Error message did not contain '" + expectedMessage + "', found:\n" + tojson(response)); } +/** + * Assert that an aggregation fails with any code and the error message contains the given + * string. 
+ */ +function assertErrMsgContains(coll, pipe, expectedMessage) { + const response = assert.commandFailed( + coll.getDB().runCommand({aggregate: coll.getName(), pipeline: pipe, cursor: {}})); + assert.neq( + -1, + response.errmsg.indexOf(expectedMessage), + "Error message did not contain '" + expectedMessage + "', found:\n" + tojson(response)); +} + +/** + * Assert that an aggregation fails with any code and the error message does not contain the given + * string. + */ +function assertErrMsgDoesNotContain(coll, pipe, expectedMessage) { + const response = assert.commandFailed( + coll.getDB().runCommand({aggregate: coll.getName(), pipeline: pipe, cursor: {}})); + assert.eq(-1, + response.errmsg.indexOf(expectedMessage), + "Error message contained '" + expectedMessage + "'"); +} + /** * Asserts that two arrays are equal - that is, if their sizes are equal and each element in * the 'actual' array has a matching element in the 'expected' array, without honoring elements diff --git a/jstests/aggregation/single_stage_alias_error.js b/jstests/aggregation/single_stage_alias_error.js new file mode 100644 index 00000000000..cdf440093cb --- /dev/null +++ b/jstests/aggregation/single_stage_alias_error.js @@ -0,0 +1,41 @@ +/** + * Tests to verify that single aggregation stages that are input into an aggregation pipeline by + * the user under an aliased name use that name when reporting errors back to the user. + */ + +(function() { + "use strict"; + + // For assertErrMsgContains and assertErrMsgDoesNotContain. + load("jstests/aggregation/extras/utils.js"); + const coll = db.single_stage_alias_error; + + coll.drop(); + + // Assert that, despite the fact $set and $addFields are internally identical, error messages + // use only the name used by the user. 
+ var pipeline = [{'$set': {}}]; + assertErrMsgContains(coll, pipeline, "$set"); + assertErrMsgDoesNotContain(coll, pipeline, "$addFields"); + + pipeline = [{'$addFields': {}}]; + assertErrMsgContains(coll, pipeline, "$addFields"); + assertErrMsgDoesNotContain(coll, pipeline, "$set"); + + // Assert that, despite the fact $unset is an alias for an exclusion projection, error messages + // use only the name used by the user. + pipeline = [{'$unset': [""]}]; + assertErrMsgContains(coll, pipeline, "$unset"); + assertErrMsgDoesNotContain(coll, pipeline, "$project"); + + pipeline = [{'$project': [""]}]; + assertErrMsgContains(coll, pipeline, "$project"); + assertErrMsgDoesNotContain(coll, pipeline, "$unset"); + + // Assert that, despite the fact that $replaceWith is just an alias for $replaceRoot, error + // messages contain syntax that matches the documentation for whichever name the user inputs. + var doc = {'_id': 0}; + coll.insert(doc); + pipeline = [{'$replaceWith': "abc"}]; + +})(); diff --git a/jstests/sharding/agg_error_reports_shard_host_and_port.js b/jstests/sharding/agg_error_reports_shard_host_and_port.js index 3a73c1d2493..5c9fd65d8f2 100644 --- a/jstests/sharding/agg_error_reports_shard_host_and_port.js +++ b/jstests/sharding/agg_error_reports_shard_host_and_port.js @@ -3,7 +3,7 @@ (function() { "use strict"; - load("jstests/aggregation/extras/utils.js"); // For assertErrMsgContains. + load("jstests/aggregation/extras/utils.js"); // For assertErrCodeAndErrMsgContains. 
const st = new ShardingTest({shards: 2, config: 1}); @@ -28,7 +28,7 @@ const pipe = [{$project: {a: {$divide: ["$_id", 0]}}}]; const divideByZeroErrorCode = 16608; - assertErrMsgContains(coll, pipe, divideByZeroErrorCode, st.rs1.getPrimary().host); + assertErrCodeAndErrMsgContains(coll, pipe, divideByZeroErrorCode, st.rs1.getPrimary().host); st.stop(); }()); diff --git a/jstests/sharding/merge_requires_unique_index.js b/jstests/sharding/merge_requires_unique_index.js index 78ee6c7f9eb..868b40c0e7e 100644 --- a/jstests/sharding/merge_requires_unique_index.js +++ b/jstests/sharding/merge_requires_unique_index.js @@ -215,20 +215,20 @@ {"newField.subField": 1, proofOfUpdate: 1, _id: 0}), {newField: {subField: "hi"}, proofOfUpdate: "PROOF"}); } else { - assertErrMsgContains(sourceColl, - [{ - $merge: { - into: { - db: targetColl.getDB().getName(), - coll: targetColl.getName(), - }, - whenMatched: "replace", - whenNotMatched: "insert", - on: Object.keys(dottedPathIndexSpec) - } - }], - ErrorCodes.ImmutableField, - "did you attempt to modify the _id or the shard key?"); + assertErrCodeAndErrMsgContains(sourceColl, + [{ + $merge: { + into: { + db: targetColl.getDB().getName(), + coll: targetColl.getName(), + }, + whenMatched: "replace", + whenNotMatched: "insert", + on: Object.keys(dottedPathIndexSpec) + } + }], + ErrorCodes.ImmutableField, + "did you attempt to modify the _id or the shard key?"); assert.doesNotThrow(() => sourceColl.aggregate([ {$project: {_id: 0}}, diff --git a/src/mongo/db/pipeline/document_source_add_fields.cpp b/src/mongo/db/pipeline/document_source_add_fields.cpp index 319ef9776c6..8784a5ebf16 100644 --- a/src/mongo/db/pipeline/document_source_add_fields.cpp +++ b/src/mongo/db/pipeline/document_source_add_fields.cpp @@ -50,14 +50,23 @@ REGISTER_DOCUMENT_SOURCE(set, DocumentSourceAddFields::createFromBson); intrusive_ptr DocumentSourceAddFields::create( - BSONObj addFieldsSpec, const intrusive_ptr& expCtx, StringData stageName) { + BSONObj 
addFieldsSpec, + const intrusive_ptr& expCtx, + StringData userSpecifiedName) { const bool isIndependentOfAnyCollection = false; intrusive_ptr addFields( new DocumentSourceSingleDocumentTransformation( expCtx, - ParsedAddFields::create(expCtx, addFieldsSpec), - stageName.toString(), + [&]() { + try { + return ParsedAddFields::create(expCtx, addFieldsSpec); + } catch (DBException& ex) { + ex.addContext("Invalid " + userSpecifiedName.toString()); + throw; + } + }(), + userSpecifiedName.toString(), isIndependentOfAnyCollection)); return addFields; } diff --git a/src/mongo/db/pipeline/document_source_project.cpp b/src/mongo/db/pipeline/document_source_project.cpp index 50c6f3defb6..419351b20e5 100644 --- a/src/mongo/db/pipeline/document_source_project.cpp +++ b/src/mongo/db/pipeline/document_source_project.cpp @@ -62,15 +62,26 @@ BSONObj buildExclusionProjectionSpecification(const std::vector& un } // namespace intrusive_ptr DocumentSourceProject::create( - BSONObj projectSpec, const intrusive_ptr& expCtx) { + BSONObj projectSpec, const intrusive_ptr& expCtx, StringData specifiedName) { const bool isIndependentOfAnyCollection = false; intrusive_ptr project(new DocumentSourceSingleDocumentTransformation( expCtx, - ParsedAggregationProjection::create( - expCtx, - projectSpec, - {ProjectionPolicies::DefaultIdPolicy::kIncludeId, - ProjectionPolicies::ArrayRecursionPolicy::kRecurseNestedArrays}), + [&]() { + // The ParsedAggregationProjection will internally perform a check to see if the + // provided specification is valid, and throw an exception if it was not. The exception + // is caught here so we can add the name that was actually specified by the user, be it + // $project or an alias. 
+ try { + return ParsedAggregationProjection::create( + expCtx, + projectSpec, + {ProjectionPolicies::DefaultIdPolicy::kIncludeId, + ProjectionPolicies::ArrayRecursionPolicy::kRecurseNestedArrays}); + } catch (DBException& ex) { + ex.addContext("Invalid " + specifiedName.toString()); + throw; + } + }(), DocumentSourceProject::kStageName.rawData(), isIndependentOfAnyCollection)); return project; @@ -80,7 +91,7 @@ intrusive_ptr DocumentSourceProject::createFromBson( BSONElement elem, const intrusive_ptr& expCtx) { if (elem.fieldNameStringData() == kStageName) { uassert(15969, "$project specification must be an object", elem.type() == BSONType::Object); - return DocumentSourceProject::create(elem.Obj(), expCtx); + return DocumentSourceProject::create(elem.Obj(), expCtx, elem.fieldNameStringData()); } invariant(elem.fieldNameStringData() == kAliasNameUnset); @@ -99,7 +110,8 @@ intrusive_ptr DocumentSourceProject::createFromBson( std::all_of(unsetSpec.cbegin(), unsetSpec.cend(), [](BSONElement elem) { return elem.type() == BSONType::String; })); - return DocumentSourceProject::create(buildExclusionProjectionSpecification(unsetSpec), expCtx); + return DocumentSourceProject::create( + buildExclusionProjectionSpecification(unsetSpec), expCtx, elem.fieldNameStringData()); } } // namespace mongo diff --git a/src/mongo/db/pipeline/document_source_project.h b/src/mongo/db/pipeline/document_source_project.h index d1e556e8ccc..43ee90fce0f 100644 --- a/src/mongo/db/pipeline/document_source_project.h +++ b/src/mongo/db/pipeline/document_source_project.h @@ -48,7 +48,9 @@ public: * Convenience method to create a $project stage from 'projectSpec'. */ static boost::intrusive_ptr create( - BSONObj projectSpec, const boost::intrusive_ptr& expCtx); + BSONObj projectSpec, + const boost::intrusive_ptr& expCtx, + StringData specifiedName); /** * Parses a $project stage from the user-supplied BSON. 
diff --git a/src/mongo/db/pipeline/document_source_project_test.cpp b/src/mongo/db/pipeline/document_source_project_test.cpp index 1b7696adad6..4f8f800c79d 100644 --- a/src/mongo/db/pipeline/document_source_project_test.cpp +++ b/src/mongo/db/pipeline/document_source_project_test.cpp @@ -61,8 +61,8 @@ using ProjectStageTest = AggregationContextFixture; using UnsetTest = AggregationContextFixture; TEST_F(ProjectStageTest, InclusionProjectionShouldRemoveUnspecifiedFields) { - auto project = - DocumentSourceProject::create(BSON("a" << true << "c" << BSON("d" << true)), getExpCtx()); + auto project = DocumentSourceProject::create( + BSON("a" << true << "c" << BSON("d" << true)), getExpCtx(), "$project"_sd); auto source = DocumentSourceMock::createForTest("{_id: 0, a: 1, b: 1, c: {d: 1}}"); project->setSource(source.get()); // The first result exists and is as expected. @@ -78,7 +78,9 @@ TEST_F(ProjectStageTest, InclusionProjectionShouldRemoveUnspecifiedFields) { TEST_F(ProjectStageTest, ShouldOptimizeInnerExpressions) { auto project = DocumentSourceProject::create( - BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))), getExpCtx()); + BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))), + getExpCtx(), + "$project"_sd); project->optimize(); // The $and should have been replaced with its only argument. vector serializedArray; @@ -100,7 +102,7 @@ TEST_F(ProjectStageTest, ShouldErrorOnNonObjectSpec) { * projection. */ TEST_F(ProjectStageTest, InclusionShouldBeAbleToProcessMultipleDocuments) { - auto project = DocumentSourceProject::create(BSON("a" << true), getExpCtx()); + auto project = DocumentSourceProject::create(BSON("a" << true), getExpCtx(), "$project"_sd); auto source = DocumentSourceMock::createForTest({"{a: 1, b: 2}", "{a: 3, b: 4}"}); project->setSource(source.get()); auto next = project->getNext(); @@ -123,7 +125,7 @@ TEST_F(ProjectStageTest, InclusionShouldBeAbleToProcessMultipleDocuments) { * projection. 
*/ TEST_F(ProjectStageTest, ExclusionShouldBeAbleToProcessMultipleDocuments) { - auto project = DocumentSourceProject::create(BSON("a" << false), getExpCtx()); + auto project = DocumentSourceProject::create(BSON("a" << false), getExpCtx(), "$project"_sd); auto source = DocumentSourceMock::createForTest({"{a: 1, b: 2}", "{a: 3, b: 4}"}); project->setSource(source.get()); auto next = project->getNext(); @@ -142,7 +144,7 @@ TEST_F(ProjectStageTest, ExclusionShouldBeAbleToProcessMultipleDocuments) { } TEST_F(ProjectStageTest, ShouldPropagatePauses) { - auto project = DocumentSourceProject::create(BSON("a" << false), getExpCtx()); + auto project = DocumentSourceProject::create(BSON("a" << false), getExpCtx(), "$project"_sd); auto source = DocumentSourceMock::createForTest({Document(), DocumentSource::GetNextResult::makePauseExecution(), @@ -167,7 +169,8 @@ TEST_F(ProjectStageTest, ShouldPropagatePauses) { TEST_F(ProjectStageTest, InclusionShouldAddDependenciesOfIncludedAndComputedFields) { auto project = DocumentSourceProject::create( fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"), - getExpCtx()); + getExpCtx(), + "$project"_sd); DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore); ASSERT_EQUALS(DepsTracker::State::EXHAUSTIVE_FIELDS, project->getDependencies(&dependencies)); ASSERT_EQUALS(5U, dependencies.fields.size()); @@ -189,7 +192,8 @@ TEST_F(ProjectStageTest, InclusionShouldAddDependenciesOfIncludedAndComputedFiel } TEST_F(ProjectStageTest, ExclusionShouldNotAddDependencies) { - auto project = DocumentSourceProject::create(fromjson("{a: false, 'b.c': false}"), getExpCtx()); + auto project = DocumentSourceProject::create( + fromjson("{a: false, 'b.c': false}"), getExpCtx(), "$project"_sd); DepsTracker dependencies; ASSERT_EQUALS(DepsTracker::State::SEE_NEXT, project->getDependencies(&dependencies)); @@ -202,7 +206,8 @@ TEST_F(ProjectStageTest, ExclusionShouldNotAddDependencies) { TEST_F(ProjectStageTest, 
InclusionProjectionReportsIncludedPathsFromGetModifiedPaths) { auto project = DocumentSourceProject::create( fromjson("{a: true, 'b.c': {d: true}, e: {f: {g: true}}, h: {i: {$literal: true}}}"), - getExpCtx()); + getExpCtx(), + "$project"_sd); auto modifiedPaths = project->getModifiedPaths(); ASSERT(modifiedPaths.type == DocumentSource::GetModPathsReturn::Type::kAllExcept); @@ -216,7 +221,8 @@ TEST_F(ProjectStageTest, InclusionProjectionReportsIncludedPathsFromGetModifiedP TEST_F(ProjectStageTest, InclusionProjectionReportsIncludedPathsButExcludesId) { auto project = DocumentSourceProject::create( fromjson("{_id: false, 'b.c': {d: true}, e: {f: {g: true}}, h: {i: {$literal: true}}}"), - getExpCtx()); + getExpCtx(), + "$project"_sd); auto modifiedPaths = project->getModifiedPaths(); ASSERT(modifiedPaths.type == DocumentSource::GetModPathsReturn::Type::kAllExcept); @@ -227,7 +233,7 @@ TEST_F(ProjectStageTest, InclusionProjectionReportsIncludedPathsButExcludesId) { TEST_F(ProjectStageTest, ExclusionProjectionReportsExcludedPathsAsModifiedPaths) { auto project = DocumentSourceProject::create( - fromjson("{a: false, 'b.c': {d: false}, e: {f: {g: false}}}"), getExpCtx()); + fromjson("{a: false, 'b.c': {d: false}, e: {f: {g: false}}}"), getExpCtx(), "$project"_sd); auto modifiedPaths = project->getModifiedPaths(); ASSERT(modifiedPaths.type == DocumentSource::GetModPathsReturn::Type::kFiniteSet); @@ -239,7 +245,9 @@ TEST_F(ProjectStageTest, ExclusionProjectionReportsExcludedPathsAsModifiedPaths) TEST_F(ProjectStageTest, ExclusionProjectionReportsExcludedPathsWithIdExclusion) { auto project = DocumentSourceProject::create( - fromjson("{_id: false, 'b.c': {d: false}, e: {f: {g: false}}}"), getExpCtx()); + fromjson("{_id: false, 'b.c': {d: false}, e: {f: {g: false}}}"), + getExpCtx(), + "$project"_sd); auto modifiedPaths = project->getModifiedPaths(); ASSERT(modifiedPaths.type == DocumentSource::GetModPathsReturn::Type::kFiniteSet); @@ -251,7 +259,9 @@ 
TEST_F(ProjectStageTest, ExclusionProjectionReportsExcludedPathsWithIdExclusion) TEST_F(ProjectStageTest, CanUseRemoveSystemVariableToConditionallyExcludeProjectedField) { auto project = DocumentSourceProject::create( - fromjson("{a: 1, b: {$cond: [{$eq: ['$b', 4]}, '$$REMOVE', '$b']}}"), getExpCtx()); + fromjson("{a: 1, b: {$cond: [{$eq: ['$b', 4]}, '$$REMOVE', '$b']}}"), + getExpCtx(), + "$project"_sd); auto source = DocumentSourceMock::createForTest({"{a: 2, b: 2}", "{a: 3, b: 4}"}); project->setSource(source.get()); auto next = project->getNext(); @@ -268,7 +278,8 @@ TEST_F(ProjectStageTest, CanUseRemoveSystemVariableToConditionallyExcludeProject } TEST_F(ProjectStageTest, ProjectionCorrectlyReportsRenamesForwards) { - auto project = DocumentSourceProject::create(fromjson("{'renamedB' : '$b'}"), getExpCtx()); + auto project = + DocumentSourceProject::create(fromjson("{'renamedB' : '$b'}"), getExpCtx(), "$project"_sd); auto renames = semantic_analysis::renamedPaths({"b"}, *project, semantic_analysis::Direction::kForward); // renamedPaths should return a mapping of old name->new name for each path in interestingPaths @@ -281,7 +292,8 @@ TEST_F(ProjectStageTest, ProjectionCorrectlyReportsRenamesForwards) { } TEST_F(ProjectStageTest, ProjectionCorrectlyReportsRenamesBackwards) { - auto project = DocumentSourceProject::create(fromjson("{'renamedB' : '$b'}"), getExpCtx()); + auto project = + DocumentSourceProject::create(fromjson("{'renamedB' : '$b'}"), getExpCtx(), "$project"_sd); auto renames = semantic_analysis::renamedPaths( {"renamedB"}, *project, semantic_analysis::Direction::kBackward); auto single_rename = renames->extract("renamedB"); @@ -306,7 +318,9 @@ BSONObj makeProjectForNestedDocument(size_t depth) { TEST_F(ProjectStageTest, CanAddNestedDocumentExactlyAtDepthLimit) { auto project = DocumentSourceProject::create( - makeProjectForNestedDocument(BSONDepth::getMaxAllowableDepth()), getExpCtx()); + 
makeProjectForNestedDocument(BSONDepth::getMaxAllowableDepth()), + getExpCtx(), + "$project"_sd); auto mock = DocumentSourceMock::createForTest(Document{{"_id", 1}}); project->setSource(mock.get()); @@ -315,11 +329,12 @@ TEST_F(ProjectStageTest, CanAddNestedDocumentExactlyAtDepthLimit) { } TEST_F(ProjectStageTest, CannotAddNestedDocumentExceedingDepthLimit) { - ASSERT_THROWS_CODE( - DocumentSourceProject::create( - makeProjectForNestedDocument(BSONDepth::getMaxAllowableDepth() + 1), getExpCtx()), - AssertionException, - ErrorCodes::Overflow); + ASSERT_THROWS_CODE(DocumentSourceProject::create( + makeProjectForNestedDocument(BSONDepth::getMaxAllowableDepth() + 1), + getExpCtx(), + "$project"_sd), + AssertionException, + ErrorCodes::Overflow); } TEST_F(UnsetTest, AcceptsValidUnsetSpecWithArray) { diff --git a/src/mongo/db/pipeline/document_source_replace_root.cpp b/src/mongo/db/pipeline/document_source_replace_root.cpp index deefe509bb7..e494fe1ea2a 100644 --- a/src/mongo/db/pipeline/document_source_replace_root.cpp +++ b/src/mongo/db/pipeline/document_source_replace_root.cpp @@ -46,15 +46,28 @@ Document ReplaceRootTransformation::applyTransformation(const Document& input) { // Extract subdocument in the form of a Value. Value newRoot = _newRoot->evaluate(input, &_expCtx->variables); + // To ensure an accurate user-facing message, any user-facing syntax that uses this stage + // internally must provide a message opener that complies with its documentation. + StringData msgOpener = [&]() { + switch (_specifiedName) { + case UserSpecifiedName::kReplaceRoot: + return "'newRoot' expression "_sd; + case UserSpecifiedName::kReplaceWith: + return "'replacement document' "_sd; + default: + MONGO_UNREACHABLE; + } + }(); + // The newRoot expression, if it exists, must evaluate to an object. uassert(40228, - str::stream() - << "'newRoot' expression must evaluate to an object, but resulting value was: " - << newRoot.toString() - << ". 
Type of resulting value: '" - << typeName(newRoot.getType()) - << "'. Input document: " - << input.toString(), + str::stream() << msgOpener.toString() + << "must evaluate to an object, but resulting value was: " + << newRoot.toString() + << ". Type of resulting value: '" + << typeName(newRoot.getType()) + << "'. Input document: " + << input.toString(), newRoot.getType() == BSONType::Object); // Turn the value into a document. @@ -104,7 +117,11 @@ intrusive_ptr DocumentSourceReplaceRoot::createFromBson( const bool isIndependentOfAnyCollection = false; return new DocumentSourceSingleDocumentTransformation( expCtx, - std::make_unique(expCtx, newRootExpression), + std::make_unique( + expCtx, + newRootExpression, + (stageName == kStageName) ? ReplaceRootTransformation::UserSpecifiedName::kReplaceRoot + : ReplaceRootTransformation::UserSpecifiedName::kReplaceWith), kStageName.toString(), isIndependentOfAnyCollection); } diff --git a/src/mongo/db/pipeline/document_source_replace_root.h b/src/mongo/db/pipeline/document_source_replace_root.h index 290919615bc..917b221e20c 100644 --- a/src/mongo/db/pipeline/document_source_replace_root.h +++ b/src/mongo/db/pipeline/document_source_replace_root.h @@ -40,9 +40,12 @@ namespace mongo { */ class ReplaceRootTransformation final : public TransformerInterface { public: + enum class UserSpecifiedName { kReplaceRoot, kReplaceWith }; + ReplaceRootTransformation(const boost::intrusive_ptr& expCtx, - boost::intrusive_ptr newRootExpression) - : _expCtx(expCtx), _newRoot(std::move(newRootExpression)) {} + boost::intrusive_ptr newRootExpression, + UserSpecifiedName specifiedName) + : _expCtx(expCtx), _newRoot(std::move(newRootExpression)), _specifiedName(specifiedName) {} TransformerType getType() const final { return TransformerType::kReplaceRoot; @@ -79,6 +82,7 @@ public: private: const boost::intrusive_ptr _expCtx; boost::intrusive_ptr _newRoot; + UserSpecifiedName _specifiedName; }; /* diff --git 
a/src/mongo/db/pipeline/parsed_add_fields.cpp b/src/mongo/db/pipeline/parsed_add_fields.cpp index 2e2a1602414..d756c444fa7 100644 --- a/src/mongo/db/pipeline/parsed_add_fields.cpp +++ b/src/mongo/db/pipeline/parsed_add_fields.cpp @@ -42,7 +42,7 @@ namespace parsed_aggregation_projection { std::unique_ptr ParsedAddFields::create( const boost::intrusive_ptr& expCtx, const BSONObj& spec) { // Verify that we don't have conflicting field paths, etc. - ProjectionSpecValidator::uassertValid(spec, "$addFields"); + ProjectionSpecValidator::uassertValid(spec); std::unique_ptr parsedAddFields = std::make_unique(expCtx); // Actually parse the specification. diff --git a/src/mongo/db/pipeline/parsed_aggregation_projection.cpp b/src/mongo/db/pipeline/parsed_aggregation_projection.cpp index 3f283079ac4..058e20b6d0b 100644 --- a/src/mongo/db/pipeline/parsed_aggregation_projection.cpp +++ b/src/mongo/db/pipeline/parsed_aggregation_projection.cpp @@ -55,13 +55,8 @@ using expression::isPathPrefixOf; // ProjectionSpecValidator // -void ProjectionSpecValidator::uassertValid(const BSONObj& spec, StringData stageName) { - try { - ProjectionSpecValidator(spec).validate(); - } catch (DBException& ex) { - ex.addContext("Invalid " + stageName.toString()); - throw; - } +void ProjectionSpecValidator::uassertValid(const BSONObj& spec) { + ProjectionSpecValidator(spec).validate(); } void ProjectionSpecValidator::ensurePathDoesNotConflictOrThrow(const std::string& path) { @@ -314,11 +309,8 @@ std::unique_ptr ParsedAggregationProjection::create const boost::intrusive_ptr& expCtx, const BSONObj& spec, ProjectionPolicies policies) { - // Check that the specification was valid. Status returned is unspecific because validate() - // is used by the $addFields stage as well as $project. - // If there was an error, uassert with a $project-specific message. - ProjectionSpecValidator::uassertValid(spec, "$project"); - + // Checks that the specification was valid, and throws if it is not. 
+ ProjectionSpecValidator::uassertValid(spec); // Check for any conflicting specifications, and determine the type of the projection. auto projectionType = ProjectTypeParser::parse(spec, policies); // kComputed is a projection type reserved for $addFields, and should never be detected by the diff --git a/src/mongo/db/pipeline/parsed_aggregation_projection.h b/src/mongo/db/pipeline/parsed_aggregation_projection.h index 0542847e9ba..25a6dc025cb 100644 --- a/src/mongo/db/pipeline/parsed_aggregation_projection.h +++ b/src/mongo/db/pipeline/parsed_aggregation_projection.h @@ -55,10 +55,11 @@ namespace parsed_aggregation_projection { class ProjectionSpecValidator { public: /** - * Throws if the specification is not valid for a projection. The stageName is used to provide a - * more helpful error message. + * Throws if the specification is not valid for a projection. Because this validator is meant to + * be generic, the error thrown is generic. Callers at the DocumentSource level should modify + * the error message if they want to include information specific to the stage name used. 
*/ - static void uassertValid(const BSONObj& spec, StringData stageName); + static void uassertValid(const BSONObj& spec); private: ProjectionSpecValidator(const BSONObj& spec) : _rawObj(spec) {} diff --git a/src/mongo/db/pipeline/pipeline_test.cpp b/src/mongo/db/pipeline/pipeline_test.cpp index 3d59eff1dfa..18e149ab0c1 100644 --- a/src/mongo/db/pipeline/pipeline_test.cpp +++ b/src/mongo/db/pipeline/pipeline_test.cpp @@ -3326,12 +3326,12 @@ TEST(PipelineRenameTracking, CanHandleBackAndForthRename) { TEST(InvolvedNamespacesTest, NoInvolvedNamespacesForMatchSortProject) { boost::intrusive_ptr expCtx(new ExpressionContextForTest()); - auto pipeline = unittest::assertGet( - Pipeline::create({DocumentSourceMock::createForTest(), - DocumentSourceMatch::create(BSON("x" << 1), expCtx), - DocumentSourceSort::create(expCtx, BSON("y" << -1)), - DocumentSourceProject::create(BSON("x" << 1 << "y" << 1), expCtx)}, - expCtx)); + auto pipeline = unittest::assertGet(Pipeline::create( + {DocumentSourceMock::createForTest(), + DocumentSourceMatch::create(BSON("x" << 1), expCtx), + DocumentSourceSort::create(expCtx, BSON("y" << -1)), + DocumentSourceProject::create(BSON("x" << 1 << "y" << 1), expCtx, "$project"_sd)}, + expCtx)); auto involvedNssSet = pipeline->getInvolvedCollections(); ASSERT(involvedNssSet.empty()); }