# Mirror of https://github.com/mongodb/mongo.git (synced 2024-12-01 09:32:32 +01:00)
# File: mongodb/etc/evergreen.yml — snapshot taken 2020-03-20 17:56:48 +00:00

#####################################################
# A note on expansions #
#####################################################
# Expansions usually appear in the form ${key|default}
# If 'key' is found in the executor's map of currently known
# expansions, the corresponding value is used. If the key can
# not be found, the default is used.
#
# Arbitrary expansions can be specified in the YAML configuration
# files in the following places:
# - The 'expansions' field for buildvariants (branch file)
# - The 'expansions' field for distros (distros file)
#
# A number of 'built-in' expansions are also available for use; these include:
# - environment variables available on the host machine
# - 'workdir' (references the executor's work directory).
# - 'task_id' (references the task id of the task the executor is working on).
# - 'build_variant' (references the executing task's buildvariant).
# - 'config_root' (references the root directory for the executor's configuration artifacts).

# Continue running later tasks on older commits when a task fails.
stepback: true
# Failures in plugin/internal commands are reported as "system" failures by default.
command_type: system
# An error in the task's "pre" commands fails the whole task instead of being ignored.
pre_error_fails_task: true

# Files that match an ignore-list pattern will not trigger a build, if they're the only modified
# files in the patch.
ignore:
  - ".*"
  - "!.clang-format"
  - "!.eslintrc.yml"
  - "*.md"
  - "*.rst"
  - "*.txt"
  - "/distsrc/**"
  - "/docs/**"
  - "/etc/*.yml"
  - "!/etc/evergreen.yml"
  - "README"
## Some variables for convenience:
variables:
  # Used when the tests it runs depend only on mongod, mongos, the mongo shell and the tools.
  - &task_template
    name: template
    depends_on:
      - name: compile
    commands:
      - func: "do setup"
      - func: "run tests"
        vars:
          resmoke_args: --help
          resmoke_jobs_max: 0  # No cap on number of jobs.

  # Used for tests that invoke resmoke.py and require no additional setup.
  - &task_depending_on_all_template
    <<: *task_template
    depends_on:
      - name: compile_all

  - &benchmark_template
    name: benchmark_template
    depends_on:
      - name: compile_benchmarks
    commands:
      - func: "do benchmark setup"
      - func: "run tests"
        vars:
          resmoke_args: --help
          resmoke_jobs_max: 1
      - func: "send benchmark results"

  # Shared default arguments for the Jepsen test runner; individual tasks
  # override members of this mapping via the merge key in &run_jepsen_template.
  - &jepsen_config_vars
    jepsen_key_time_limit: --key-time-limit 15
    jepsen_protocol_version: --protocol-version 1
    jepsen_read_concern: ""
    jepsen_read_with_find_and_modify: ""
    jepsen_storage_engine: ""
    jepsen_test_name: ""
    # Empirically, we've had greater success in reproducing the issues found in MongoDB versions
    # 3.4.0-rc3 and 3.4.0-rc4 when running Jepsen with at least --time-limit=600.
    jepsen_time_limit: --time-limit 1200
    jepsen_write_concern: ""

  # Template for running Jepsen tests
  - &run_jepsen_template
    name: run_jepsen_template
    depends_on:
      - name: compile
    commands:
      - func: "do setup"
      - func: "do jepsen setup"
      - func: "run jepsen test"
        vars:
          <<: *jepsen_config_vars

  - &jstestfuzz_config_vars
    num_files: 15
    num_tasks: 1
    resmoke_args: --help  # resmoke_args needs to be overridden to specify one of the jstestfuzz suites
    resmoke_jobs_max: 1
    should_shuffle: false
    continue_on_failure: false
    # Terminate the function when there has been no output to stdout for 30 minutes. E.g. when something is stuck in an infinite loop.
    # resmoke.py writes the test output to logkeeper and only writes to stdout when starting the next test.
    # resmoke.py not producing output on stdout means that the test is still running and presumably not going to finish.
    # Note that timeout_secs is different from exec_timeout_secs, which applies to a task and times out regardless of whether output has been written to stdout.
    timeout_secs: 1800

  # Used for tests that invoke 'resmoke.py --suites=jstestfuzz*'.
  - &jstestfuzz_template
    name: jstestfuzz_template
    exec_timeout_secs: 14400  # Time out the task if it runs for more than 4 hours.
    depends_on: []
    commands:
      - func: "generate fuzzer tasks"

  - &multiversion_template
    name: multiversion_template
    exec_timeout_secs: 14400  # Time out the task if it runs for more than 4 hours.
    depends_on: []
    commands:
      - func: "generate multiversion tasks"

  # Templates used by powercycle
  - &powercycle_remote_credentials
    private_key_file: $(${posix_workdir})/powercycle.pem
    private_key_remote: ${powercycle_private_key}
    aws_key_remote: ${powercycle_aws_key}
    aws_secret_remote: ${powercycle_aws_secret}

  - &powercycle_ec2_instance
    aws_ec2_yml: aws_ec2.yml
    ec2_expire_hours: "24"
    ec2_monitor_files: proc.json system.json
    monitor_proc_file: proc.json
    monitor_system_file: system.json
    remote_dir: /log/powercycle
    secret_port: "20001"
    security_group_ids: ${powercycle_aws_security_group_id}
    security_groups: mci powercycle_testing
    subnet_id: ${powercycle_aws_subnet_id}
    ssh_identity: -i ${private_key_file}
    ssh_key_id: ${powercycle_ssh_key_id}
    standard_port: "20000"
    virtualenv_dir: venv_powercycle
    windows_crash_cmd: \"notmyfault/notmyfaultc64.exe -accepteula crash 1\"
    windows_crash_dl: https://download.sysinternals.com/files/NotMyFault.zip
    windows_crash_dir: notmyfault
    windows_crash_zip: notmyfault.zip

  - &powercycle_expansions
    params:
      updates:
        - key: backup_path_after
          value: ${remote_dir}/afterrecovery
        - key: backup_path_before
          value: ${remote_dir}/beforerecovery
        - key: backup_artifacts
          value: ${remote_dir}/afterrecovery* ${remote_dir}/beforerecovery*
        - key: db_path
          value: /data/db
        - key: log_path
          value: ${remote_dir}/mongod.log
        - key: event_logpath
          value: ${remote_dir}/eventlog
        - key: exit_file
          value: powercycle_exit.yml

  - &powercycle_test
    ec2_artifacts: ${log_path} ${db_path} ${backup_artifacts} ${event_logpath}
    program_options: --exitYamlFile=${exit_file} --logLevel=info --backupPathBefore=${backup_path_before} --backupPathAfter=${backup_path_after}
    connection_options: --sshUserHost=${private_ip_address} --sshConnection=\"${ssh_identity} ${ssh_connection_options}\"
    test_options: --testLoops=15 --seedDocNum=10000 --rsync --rsyncExcludeFiles=diagnostic.data/metrics.interim* --validate=local --canary=local
    crash_options: --crashMethod=internal --crashOption=${windows_crash_cmd} --crashWaitTime=45 --jitterForCrashWaitTime=5 --instanceId=${instance_id}
    client_options: --numCrudClients=20 --numFsmClients=20
    mongodb_options: --rootDir=${remote_dir}-${task_id} --mongodbBinDir=${remote_dir}
    mongod_options: --mongodUsablePorts ${standard_port} ${secret_port} --dbPath=${db_path} --logPath=${log_path}
    mongod_extra_options: --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}'\"

  - &libfuzzertests
    name: libfuzzertests!
    execution_tasks:
      - compile_libfuzzertests
      - libfuzzertests

  - &compile_task_group_template
    name: compile_task_group_template
    max_hosts: 1
    tasks: []
    setup_task:
      - func: "apply compile expansions"
      - func: "set task expansion macros"
    teardown_task:
      - func: "attach scons config log"
      - func: "attach report"
      - func: "attach artifacts"
      - func: "kill processes"
      - func: "save code coverage data"
      - func: "save mongo coredumps"
      - func: "save failed unittests"
      - func: "save unstripped dbtest"
      - func: "save hang analyzer debugger files"
      - func: "save disk statistics"
      - func: "save system resource information"
      - command: shell.exec
        type: setup
        params:
          working_dir: src
          shell: bash
          script: |
            set -o verbose
            set -o errexit
            ./buildscripts/merge_corpus.sh
      - func: "archive new corpus"
      - func: "upload new corpus"
      - func: "remove files"
        vars:
          files: >-
            src/resmoke_error_code
            src/build/scons/config.log
            src/*.gcda.gcov
            src/gcov-intermediate-files.tgz
            src/*.core src/*.mdmp
            mongo-coredumps.tgz
            src/unittest_binaries/*_test${exe}
            mongo-unittests.tgz
            src/debugger*.*
            src/mongo-hanganalyzer.tgz
            diskstats.tgz
            system-resource-info.tgz
            ${report_file|src/report.json}
            ${archive_file|src/archive.json}
    setup_group_can_fail_task: true
    setup_group:
      - func: "kill processes"
      - func: "cleanup environment"
      - func: "clear OOM messages"
      - command: manifest.load
      - func: "git get project"
      - func: "get all modified patch files"
      - func: "set task expansion macros"
      # The python virtual environment is installed in ${workdir}, which is created in
      # "set up virtualenv".
      - func: "set up virtualenv"
      - func: "upload pip requirements"
      - func: "configure evergreen api credentials"
      # NOTE: To disable the compile bypass feature, comment out the next line.
      - func: "bypass compile and fetch binaries"
      - func: "update bypass expansions"
      - func: "get buildnumber"
      - func: "set up credentials"
      - func: "fetch and build OpenSSL"
      - func: "use WiredTiger develop"  # noop if ${use_wt_develop} is not "true"
      - func: "set up win mount script"
      - func: "generate compile expansions"
    teardown_group:
      - func: "umount shared scons directory"
      - func: "print OOM messages"
      - func: "cleanup environment"
    timeout:
      - func: "run hang analyzer"

  # Use this template for enterprise Windows testing coverage on non-pushing
  # variants
  - &enterprise-windows-nopush-template
    name: enterprise-windows-nopush-template
    run_on:
      - windows-64-vs2019-small
    modules:
      - enterprise
    expansions: &enterprise-windows-nopush-expansions-template
      additional_targets: archive-mongocryptd archive-mongocryptd-debug
      msi_target: msi
      exe: ".exe"
      content_type: application/zip
      compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
      # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
      # spawning a large number of linker processes.
      num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
      python: '/cygdrive/c/python/python37/python.exe'
      ext: zip
      scons_cache_scope: shared
      multiversion_platform: windows
      multiversion_edition: enterprise
      jstestfuzz_num_generated_files: 35
      large_distro_name: windows-64-vs2019-large
    tasks:
      - name: compile_all_run_unittests_TG
        distros:
          - windows-64-vs2019-large
      - name: burn_in_tests_gen
      - name: .aggfuzzer .common
      - name: audit
      - name: auth_audit_gen
      - name: buildscripts_test
      - name: causally_consistent_jscore_txns_passthrough
      - name: .encrypt !.aggregation !.gcm
      - name: external_auth
      - name: external_auth_aws
      - name: external_auth_windows
        distros:
          - windows-64-2016
      - name: .jscore .common !.compat !.sharding
      - name: .jstestfuzz .common
      - name: .logical_session_cache
      - name: replica_sets_auth_gen
      - name: sasl
      - name: sharding_auth_gen
      - name: sharding_auth_audit_gen
      - name: snmp

  - &stitch_support_task_group_template
    name: stitch_support_task_group_template
    setup_task:
      - func: "apply compile expansions"
      - func: "set task expansion macros"
    teardown_task:
    setup_group_can_fail_task: true
    setup_group:
      - func: "kill processes"
      - func: "cleanup environment"
      - command: manifest.load
      - func: "git get project"
      - func: "set task expansion macros"
      - func: "set up virtualenv"
      - func: "upload pip requirements"
      - func: "get buildnumber"
      - func: "set up win mount script"
      - func: "generate compile expansions"
    teardown_group:
      - func: "umount shared scons directory"

  # List of all variants that make mongocryptd
  # If a variant is listed here and has a push task, mongocryptd is pushed
  - mongocryptd_variants: &mongocryptd_variants
      - enterprise-amazon2
      - enterprise-debian92-64
      - enterprise-debian10-64
      - enterprise-linux-64-amazon-ami
      - enterprise-macos
      - enterprise-rhel-62-64-bit
      - enterprise-rhel-62-64-bit-coverage
      - enterprise-rhel-62-64-bit-inmem
      - enterprise-rhel-62-64-bit-logv2
      - enterprise-rhel-62-64-bit-flow-control-off
      - enterprise-rhel-62-64-bit-majority-read-concern-off
      - enterprise-rhel-62-64-bit-multiversion
      - enterprise-rhel-62-64-bit-multi-txn-oplog-entries
      - enterprise-rhel-62-64-bit-required-inmem
      - enterprise-rhel-62-64-bit-required-majority-read-concern-off
      - enterprise-rhel-67-s390x
      - enterprise-rhel-70-64-bit
      - enterprise-rhel-70-64-bit-kitchen-sink
      - enterprise-rhel-70-64-bit-no-libunwind
      - enterprise-rhel-71-ppc64le
      - enterprise-rhel-71-ppc64le-inmem
      - enterprise-rhel-72-s390x
      - enterprise-rhel-72-s390x-inmem
      - enterprise-rhel-80-64-bit
      - enterprise-suse12-64
      - enterprise-suse15-64
      - enterprise-suse12-s390x
      - enterprise-ubuntu-dynamic-1604-64-bit
      - enterprise-ubuntu-dynamic-1604-clang
      - enterprise-ubuntu1604-64
      - enterprise-ubuntu1604-arm64
      - enterprise-ubuntu1804-64
      - enterprise-ubuntu1804-arm64
      - enterprise-ubuntu1804-ppc64le
      - enterprise-ubuntu1804-s390x
      - enterprise-windows
      - enterprise-windows-async
      - enterprise-windows-compile-all
      - enterprise-windows-debug-unoptimized
      - enterprise-windows-inmem
      - enterprise-windows-nopush-template
      - enterprise-windows-required
      - enterprise-windows-wtdevelop
      - ubuntu1804-debug-asan
      - ubuntu1804-debug-ubsan
      - ubuntu1804-debug-aubsan-lite
      - ubuntu1804-debug-aubsan-lite_fuzzer
      - ubuntu1804-debug-aubsan-async

  # List of all variants that make mh artifacts.
  # If a variant is listed here and has a push task, the mh artifacts are pushed
  - mh_variants: &mh_variants
      - enterprise-debian92-64
      - enterprise-macos
      - enterprise-rhel-62-64-bit
      - enterprise-rhel-70-64-bit
      - enterprise-ubuntu1604-64
      - enterprise-ubuntu1804-64
      - enterprise-windows-required

  # List of all variants that use the packages.tgz
  - package_variants: &package_variants
      - amazon
      - enterprise-linux-64-amazon-ami
      - amazon2
      - enterprise-amazon2
      - debian10
      - enterprise-debian10-64
      - debian92
      - enterprise-debian92-64
      - rhel62
      - enterprise-rhel-62-64-bit
      - enterprise-rhel-62-64-bit-coverage
      - rhel-67-s390x
      - enterprise-rhel-67-s390x
      - rhel70
      - ubi7
      - rhel76_compile_rhel70
      - enterprise-rhel-70-64-bit
      - rhel-72-s390x
      - enterprise-rhel-71-ppc64le
      - enterprise-rhel-72-s390x
      - rhel80
      - enterprise-rhel-80-64-bit
      - suse12
      - suse12-s390x
      - enterprise-suse12-64
      - enterprise-suse12-s390x
      - suse15
      - enterprise-suse15-64
      - ubuntu1604
      - ubuntu1604-debug
      - enterprise-ubuntu1604-64
      - enterprise-ubuntu1604-arm64
      - enterprise-ubuntu-dynamic-1604-64-bit
      - enterprise-ubuntu-dynamic-1604-clang
      - enterprise-ubuntu-dynamic-1604-clang-tidy
      - ubuntu-dynamic-1604-clang
      - ubuntu1804
      - ubuntu1804-arm64
      - ubuntu1804-debug-aubsan-async
      - ubuntu1804-s390x
      - enterprise-ubuntu1804-64
      - enterprise-ubuntu1804-arm64
      - enterprise-ubuntu1804-ppc64le
      - enterprise-ubuntu1804-s390x
      - enterprise-windows
      - windows
#######################################
# Functions #
#######################################
functions:
  # Deletes each regular file listed in the space-separated ${files} expansion;
  # a no-op when ${files} is empty/undefined.
  "remove files": &remove_files
    command: shell.exec
    params:
      script: |
        if [ -z "${files}" ]; then
          exit 0
        fi
        for file in ${files}
        do
          if [ -f "$file" ]; then
            echo "Removing file $file"
            rm -f $file
          fi
        done

  "configure evergreen api credentials": &configure_evergreen_api_credentials
    command: shell.exec
    type: test
    params:
      working_dir: src
      silent: true  # credentials — keep out of the task log
      script: |
        # Create the Evergreen API credentials
        cat > .evergreen.yml <<END_OF_CREDS
        api_server_host: https://evergreen.mongodb.com/api
        api_key: "${evergreen_api_key}"
        user: "${evergreen_api_user}"
        END_OF_CREDS

  "configure selected tests credentials": &configure_selected_tests_credentials
    command: shell.exec
    type: test
    params:
      working_dir: src
      silent: true  # credentials — keep out of the task log
      script: |
        # Create the Evergreen API credentials
        cat > .selected_tests.yml <<END_OF_CREDS
        url: "https://selected-tests.server-tig.prod.corp.mongodb.com"
        project: "${project}"
        auth_user: "${selected_tests_auth_user}"
        auth_token: "${selected_tests_auth_token}"
        END_OF_CREDS

  "git get project": &git_get_project
    command: git.get_project
    params:
      directory: ${git_project_directory|src}
      revisions:  # for each module include revision as <module_name> : ${<module_name>_rev}
        enterprise: ${enterprise_rev}
        wtdevelop: ${wtdevelop_rev}

  "fetch artifacts": &fetch_artifacts
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/artifacts/${build_id}.tgz
      bucket: mciuploads
      extract_to: src

  "fetch packages": &fetch_packages
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/artifacts/${build_id}-packages.tgz
      bucket: mciuploads
      extract_to: src
      build_variants: *package_variants

  "fetch dist tarball": &fetch_dist_tarball
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/dist/mongo-${build_id}.${ext|tgz}
      bucket: mciuploads
      local_file: src/mongo-binaries.tgz

  "fetch dist debugsymbols": &fetch_dist_debugsymbols
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/dist/mongo-${build_id}-debugsymbols.${ext|tgz}
      bucket: mciuploads
      local_file: src/mongo-debugsymbols.tgz

  "fetch binaries": &fetch_binaries
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${mongo_binaries}
      bucket: mciuploads
      local_file: src/mongo-binaries.tgz

  "extract binaries": &extract_binaries
    command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        ${decompress|tar xzvf} mongo-binaries.tgz

  # Verifies the fetched mongo shell reports the version recorded in
  # compile_expansions.yml; skipped when the compile step was bypassed.
  "check binary version": &check_binary_version
    command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        mongo_binary=dist-test/bin/mongo${exe}
        # For compile bypass we need to skip the binary version check since we can tag a commit
        # after the base commit binaries were created. This would lead to a mismatch of the binaries
        # and the version from git describe --abbrev=7 in the compile_expansions.yml.
        if [ "${is_patch}" = "true" ] && [ "${bypass_compile|false}" = "true" ]; then
          echo "Skipping binary version check since we are bypassing compile in this patch build."
          exit 0
        fi
        ${activate_virtualenv}
        bin_ver=$($python -c "import yaml; print(yaml.safe_load(open('compile_expansions.yml'))['version']);" | tr -d '[ \r\n]')
        # Due to SERVER-23810, we cannot use $mongo_binary --quiet --nodb --eval "version();"
        mongo_ver=$($mongo_binary --version | perl -pe '/version v([^\"]*)/; $_ = $1;' | tr -d '[ \r\n]')
        # The versions must match
        if [ "$bin_ver" != "$mongo_ver" ]; then
          echo "The mongo version is $mongo_ver, expected version is $bin_ver"
          exit 1
        fi

  "fetch benchmarks": &fetch_benchmarks
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/benchmarks/${build_id}.tgz
      bucket: mciuploads
      extract_to: src

  "fetch corpus": &fetch_corpus
    command: s3.get
    params:
      aws_key: ${s3_access_key_id}
      aws_secret: ${s3_secret_access_key}
      remote_file: ${project}/corpus/mongo-${build_variant}-latest.tgz
      bucket: fuzzer-artifacts
      local_file: src/corpus.tgz

  "extract corpus": &extract_corpus
    command: archive.auto_extract
    params:
      path: src/corpus.tgz
      destination: src/corpus

  "fetch mongohouse binaries": &fetch_mongohouse_binaries
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${mh_archive}
      bucket: mciuploads
      local_file: src/mh.tgz
      build_variants: *mh_variants

  "extract mongohouse binaries": &extract_mongohouse_binaries
    command: shell.exec
    params:
      working_dir: src
      build_variants: *mh_variants
      script: |
        set -o errexit
        ${decompress} mh.tgz

  "archive new corpus": &archive_new_corpus
    command: archive.targz_pack
    params:
      target: corpus.tgz
      source_dir: src/corpus
      include:
        - "**"

  "upload new corpus": &upload_new_corpus
    command: s3.put
    params:
      aws_key: ${s3_access_key_id}
      aws_secret: ${s3_secret_access_key}
      local_file: corpus.tgz
      remote_file: ${project}/corpus/mongo-${build_variant}-latest.tgz
      bucket: fuzzer-artifacts
      permissions: private
      content_type: ${content_type|application/gzip}
      display_name: "Fuzzer Tests Corpus Tar Archive"
      optional: true

  "get buildnumber": &get_buildnumber
    command: keyval.inc
    params:
      key: "${build_variant}_master"
      destination: "builder_num"

  # Samples disk I/O statistics in the background for the lifetime of the task,
  # choosing whichever tool the host platform provides.
  "run diskstats": &run_diskstats
    command: shell.exec
    params:
      background: true
      system_log: true
      script: |
        set -o errexit
        set -o verbose
        # On Windows we can use typeperf.exe to dump performance counters.
        if [ "Windows_NT" = "$OS" ]; then
          typeperf -qx PhysicalDisk | grep Disk | grep -v _Total > disk_counters.txt
          typeperf -cf disk_counters.txt -si 5 -o mongo-diskstats
        # Linux: iostat -t option for timestamp.
        elif iostat -tdmx > /dev/null 2>&1; then
          iostat -tdmx 5 > mongo-diskstats
        # OSX: Simulate the iostat timestamp.
        elif iostat -d > /dev/null 2>&1; then
          iostat -d -w 5 | while IFS= read -r line; do printf '%s %s\n' "$(date +'%m/%d/%Y %H:%M:%S')" "$line" >> mongo-diskstats; done
        # Check if vmstat -t is available.
        elif vmstat -td > /dev/null 2>&1; then
          vmstat -td 5 > mongo-diskstats
        # Check if vmstat -T d is available.
        elif vmstat -T d > /dev/null 2>&1; then
          vmstat -T d 5 > mongo-diskstats
        else
          printf "Cannot collect mongo-diskstats on this platform\n"
        fi

  "collect system resource info": &collect_system_resource_info
    command: shell.exec
    params:
      working_dir: src
      background: true
      system_log: true
      script: |
        ${activate_virtualenv}
        $python buildscripts/collect_resource_info.py -o system_resource_info.json -i 5

  # Run a monitor process as a background, system task to periodically
  # display how many threads interesting processes are using.
  "monitor process threads": &monitor_process_threads
    command: shell.exec
    params:
      background: true
      system_log: true
      script: |
        proc_list="(java|lein|mongo|python|_test$|_test\.exe$)"
        if [ "Windows_NT" = "$OS" ]; then
          get_pids() {
            proc_pids=$(tasklist /fo:csv |
                        awk -F'","' '{x=$1; gsub("\"","",x); print $2, x}' |
                        grep -iE $1 |
                        cut -f1 -d ' ');
          }
          get_process_info() {
            proc_name="";
            proc_info=$(wmic process where "ProcessId=\"$1\"" get "Name,ProcessId,ThreadCount" /format:csv 2> /dev/null | grep $1);
            if [ ! -z $proc_info ]; then
              proc_name=$(echo $proc_info | cut -f2 -d ',');
              proc_threads=$(echo $proc_info | cut -f4 -d ',');
            fi;
          }
        else
          get_pids() { proc_pids=$(pgrep $1); }
          get_process_info() {
            proc_name=$(ps -p $1 -o comm=);
            # /proc is available on Linux platforms
            if [ -f /proc/$1/status ]; then
              ${set_sudo}
              proc_threads=$($sudo grep Threads /proc/$1/status | sed "s/\s//g" | cut -f2 -d ":");
            else
              proc_threads=$(ps -AM $1 | grep -vc PID);
            fi;
          }
        fi
        while [ 1 ]
        do
          get_pids $proc_list
          if [ ! -z "$proc_pids" ]; then
            printf "Running process/thread counter\n"
            printf "PROCESS\tPID\tTHREADS\n"
          fi
          for pid in $proc_pids
          do
            get_process_info $pid
            if [ ! -z "$proc_name" ]; then
              printf "$proc_name\t$pid\t$proc_threads\n"
            fi
          done
          sleep 60
        done

  "set up credentials": &set_up_credentials
    command: shell.exec
    params:
      working_dir: src
      silent: true  # credentials — keep out of the task log
      script: |
        cat > mci.buildlogger <<END_OF_CREDS
        slavename='${slave}'
        passwd='${passwd}'
        builder='MCI_${build_variant}'
        build_num=${builder_num}
        build_phase='${task_name}_${execution}'
        END_OF_CREDS

  "set up win mount script": &set_up_win_mount_script
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      silent: true  # contains the share password
      script: |
        cat <<EOF > win_mount.sh
        net use X: '\\\\${win_scons_endpoint}\\share' /USER:"wincache.build.com\${win_scons_user}" '${win_scons_pass}'
        EOF
        chmod +x win_mount.sh

  "set up notary client credentials": &set_up_notary_client_credentials
    command: shell.exec
    params:
      working_dir: src
      silent: true  # signing tokens — keep out of the task log
      script: |
        set -o errexit
        cat <<EOF > notary_env.sh
        export NOTARY_TOKEN=${signing_auth_token_46}
        export BARQUE_USERNAME=${barque_user}
        export BARQUE_PASSWORD=${barque_password}
        EOF
        echo "${signing_auth_token_46}" > signing_auth_token

  # Writes the ssh private key plus ~/.aws and ~/.boto configuration used by
  # remote (EC2) test drivers such as powercycle.
  "set up remote credentials": &set_up_remote_credentials
    command: shell.exec
    params:
      silent: true
      script: |
        set -o errexit
        # Since the macros 'private_key_remote' and 'private_key_file' are not always defined
        # we default to /dev/null to avoid syntax errors of an empty expansion.
        if [ ! -z "${private_key_remote}" ] && [ ! -z "${private_key_file}" ] ; then
          mkdir -p ~/.ssh
          echo -n "${private_key_remote}" > ${private_key_file|/dev/null}
          chmod 0600 ${private_key_file|/dev/null}
        fi
        # Ensure a clean aws configuration state
        rm -rf ~/.aws
        mkdir -p ~/.aws
        # If ${aws_profile_remote} is not specified then the config & credentials are
        # stored in the 'default' profile.
        aws_profile="${aws_profile_remote|default}"
        # The profile in the config file is specified as [profile <profile>], except
        # for [default], see http://boto3.readthedocs.io/en/latest/guide/configuration.html
        if [ $aws_profile = "default" ] ; then
          aws_profile_config="[default]"
        else
          aws_profile_config="[profile $aws_profile]"
        fi
        cat <<EOF >> ~/.aws/config
        $aws_profile_config
        region = us-east-1
        EOF
        # The profile in the credentials file is specified as [<profile>].
        cat <<EOF >> ~/.aws/credentials
        [$aws_profile]
        aws_access_key_id = ${aws_key_remote}
        aws_secret_access_key = ${aws_secret_remote}
        EOF
        cat <<EOF > ~/.boto
        [Boto]
        https_validate_certificates = False
        EOF

  "call BF Suggestion service":
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      silent: true  # request carries service credentials
      script: |
        report_file="report.json"
        # Check if the report file exists and has failures.
        if [ -f $report_file ] && grep -Eq "\"failures\": [1-9]" $report_file; then
          # Calling the BF Suggestion server endpoint to start feature extraction.
          payload="{\"task_id\": \"${task_id}\", \"execution\": ${execution}}"
          echo "Sending task info to the BF suggestion service"
          # The --user option is passed through stdin to avoid showing in process list.
          user_option="--user ${bfsuggestion_user}:${bfsuggestion_password}"
          curl --header "Content-Type: application/json" \
               --data "$payload" \
               --max-time 10 \
               --silent \
               --show-error \
               --config - \
               https://bfsuggestion.corp.mongodb.com/tasks <<< $user_option
          echo "Request to BF Suggestion service status: $?"
        fi

  "upload debugsymbols": &upload_debugsymbols
    command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/mongo-debugsymbols.${ext|tgz}
      remote_file: ${mongo_debugsymbols}
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}

  "fetch debugsymbols archive": &fetch_debugsymbols_archive
    command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${mongo_debugsymbols}
      bucket: mciuploads
      local_file: src/mongo-debugsymbols.tgz

  "fetch and build OpenSSL":
    command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        if [ "${build_openssl|}" = "true" ]; then
          bash buildscripts/fetch_and_build_openssl.sh "${python|python3}" "${openssl_make_flags|}" "${openssl_config_flags|}"
        fi

  # Replaces the vendored wiredtiger sources with the wtdevelop module checkout.
  "use WiredTiger develop":
    command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        if [ "${use_wt_develop|}" = "true" ]; then
          cd src/third_party
          for wtdir in dist examples ext lang src test tools ; do
            rm -rf wiredtiger/$wtdir
            mv wtdevelop/$wtdir wiredtiger/
          done
        fi

  "shared scons cache pruning":
    command: shell.exec
    type: system
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        # removes files from the shared scons cache.
        # Only prune on master branch
        if [[ "${project}" == "mongodb-mongo-master" ]]; then
          set +o errexit
          if [ "Windows_NT" = "$OS" ]; then
            ./win_mount.sh
          else
            mount | grep "\/efs" > /dev/null
          fi
          if [ $? -eq 0 ]; then
            echo "Shared cache is already mounted"
          else
            echo "Shared cache - mounting file system"
            if [ "Windows_NT" = "$OS" ]; then
              ./win_mount.sh
            else
              sudo mount /efs
            fi
          fi
          set -o errexit
          if [ "Windows_NT" = "$OS" ]; then
            cache_folder=/cygdrive/x
          else
            cache_folder=/efs
          fi
          dirs=$(ls -l $cache_folder | grep -v total | awk '{print $NF}')
          echo "Pruning shared SCons directories"
          for dir in $dirs; do
            echo "Pruning $cache_folder/$dir/scons-cache"
            if [ -e $cache_folder/$dir/info/distro_name ]; then
              distro=$(cat $cache_folder/$dir/info/distro_name)
            fi
            # Set cache sizes by distro
            case $distro in
              ubuntu1604|ubuntu1804|rhel62|rhel70)
                cache_size=600
                ;;
              *)
                # default
                cache_size=400
                ;;
            esac
            if [ "Windows_NT" = "$OS" ]; then
              echo "dir="$dir
              python buildscripts/scons_cache_prune.py --cache-dir x:/$dir/scons-cache --cache-size $cache_size --prune-ratio 1.0
            else
              sudo python buildscripts/scons_cache_prune.py --cache-dir /efs/$dir/scons-cache --cache-size $cache_size --prune-ratio 1.0
            fi
            echo ""
          done
          if [ "Windows_NT" = "$OS" ]; then
            net use X: /delete || true
          else
            sudo umount /efs || true
          fi
        else
          echo "Not on master, shared SCons cache pruning skipped"
        fi

  "umount shared scons directory":
    command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        if [ "${disable_shared_scons_cache}" = true ]; then
          exit
        fi
        if [ "${scons_cache_scope}" = "shared" ]; then
          if [ "Windows_NT" = "$OS" ]; then
            net use X: /delete || true
          else
            ${set_sudo}
            $sudo umount /efs || true
          fi
        fi

  "get all modified patch files":
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        # For patch builds gather the modified patch files.
        if [ "${is_patch}" = "true" ]; then
          # Get list of patched files
          git diff HEAD --name-only >> patch_files.txt
          if [ -d src/mongo/db/modules/enterprise ]; then
            pushd src/mongo/db/modules/enterprise
            # Update the patch_files.txt in the mongo repo.
            git diff HEAD --name-only >> ~1/patch_files.txt
            popd
          fi
        fi

  # This function should only be called from patch-build-only tasks.
  "get added and modified patch files":
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        git diff HEAD --name-only --line-prefix="${workdir}/src/" --diff-filter=d >> modified_and_created_patch_files.txt
        if [ -d src/mongo/db/modules/enterprise ]; then
          pushd src/mongo/db/modules/enterprise
          git diff HEAD --name-only --line-prefix="${workdir}/src/src/mongo/db/modules/enterprise/" --diff-filter=d >> ~1/modified_and_created_patch_files.txt
          popd
        fi

  "determine resmoke jobs": &determine_resmoke_jobs
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        ${activate_virtualenv}
        $python buildscripts/evergreen_resmoke_job_count.py \
          --taskName ${task_name} \
          --buildVariant ${build_variant} \
          --jobFactor ${resmoke_jobs_factor|1} \
          --jobsMax ${resmoke_jobs_max|0} \
          --outFile resmoke_jobs_expansion.yml

  "update resmoke jobs expansions": &update_resmoke_jobs_expansions
    command: expansions.update
    params:
      ignore_missing_file: true
      file: src/resmoke_jobs_expansion.yml

  "determine task timeout": &determine_task_timeout
    command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        ${activate_virtualenv}
        $python buildscripts/evergreen_task_timeout.py \
          --task-name ${task_name} \
          --build-variant ${build_variant} \
          --timeout ${timeout_secs|0} \
          --out-file task_timeout_expansions.yml

  "update task timeout expansions": &update_task_timeout_expansions
    command: expansions.update
    params:
      ignore_missing_file: true
      file: src/task_timeout_expansions.yml

  "update task timeout": &update_task_timeout
    command: timeout.update
    params:
      timeout_secs: ${timeout_secs}

  "update bypass expansions": &update_bypass_expansions
    command: expansions.update
    params:
      ignore_missing_file: true
      file: src/bypass_compile_expansions.yml

  # Decides whether this patch build can reuse the base commit's compiled
  # binaries instead of recompiling; writes bypass_compile_expansions.yml.
  "bypass compile and fetch binaries":
    command: shell.exec
    params:
      continue_on_err: true
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        if [ -n "${burn_in_bypass}" ]; then
          ${activate_virtualenv}
          # Evergreen executable is in $HOME, so add that to the path.
          DESTDIR=${destdir} PATH=$PATH:$HOME $python buildscripts/burn_in_tags_bypass_compile_and_fetch_binaries.py \
            --project ${project} \
            --build-variant ${burn_in_bypass} \
            --revision ${revision} \
            --out-file bypass_compile_expansions.yml \
            --version-id ${version_id} \
            --json-artifact artifacts.json
        # For patch builds determine if we can bypass compile.
        elif [[ "${is_patch}" = "true" && "${task_name}" = "compile" ]]; then
          ${activate_virtualenv}
          # Evergreen executable is in $HOME, so add that to the path.
          DESTDIR=${destdir} PATH=$PATH:$HOME $python buildscripts/bypass_compile_and_fetch_binaries.py \
            --project ${project} \
            --build-variant ${build_variant} \
            --revision ${revision} \
            --patch-file patch_files.txt \
            --out-file bypass_compile_expansions.yml \
            --json-artifact artifacts.json
        fi
### Set expansion macros used in each task.
### Note that this function will override task expansions set in update_bypass_expansions
"set task expansion macros": &set_task_expansion_macros
command: expansions.update
params:
updates:
- key: activate_virtualenv
value: |
# check if virtualenv is set up
if [ -d "${workdir}/venv" ]; then
if [ "Windows_NT" = "$OS" ]; then
# Need to quote the path on Windows to preserve the separator.
. "${workdir}/venv/Scripts/activate" 2> /tmp/activate_error.log
else
. ${workdir}/venv/bin/activate 2> /tmp/activate_error.log
fi
if [ $? -ne 0 ]; then
echo "Failed to activate virtualenv: $(cat /tmp/activate_error.log)"
fi
python=python
else
python=${python|/opt/mongodbtoolchain/v3/bin/python3}
fi
if [ "Windows_NT" = "$OS" ]; then
export PYTHONPATH="$PYTHONPATH;$(cygpath -w ${workdir}/src)"
else
export PYTHONPATH="$PYTHONPATH:${workdir}/src"
fi
echo "python set to $(which $python)"
- key: add_nodejs_to_path
value: |
# Add node and npm binaries to PATH
if [ "Windows_NT" = "$OS" ]; then
# An "npm" directory might not have been created in %APPDATA% by the Windows installer.
# Work around the issue by specifying a different %APPDATA% path.
# See: https://github.com/nodejs/node-v0.x-archive/issues/8141
export APPDATA=${workdir}/npm-app-data
export PATH="$PATH:/cygdrive/c/Program Files (x86)/nodejs" # Windows location
# TODO: this is to work around BUILD-8652
cd "$(pwd -P | sed 's,cygdrive/c/,cygdrive/z/,')"
else
export PATH="$PATH:/opt/node/bin"
fi
- key: posix_workdir
value: eval 'if [ "Windows_NT" = "$OS" ]; then echo $(cygpath -u "${workdir}"); else echo ${workdir}; fi'
# For ssh disable the options GSSAPIAuthentication, CheckHostIP, StrictHostKeyChecking
# & UserKnownHostsFile, since these are local connections from one AWS instance to another.
- key: ssh_connection_options
value: -o GSSAPIAuthentication=no -o CheckHostIP=no -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=30 -o ConnectionAttempts=20
- key: ssh_retries
value: "10"
- key: set_sudo
value: |
set -o > /tmp/settings.log
set +o errexit
grep errexit /tmp/settings.log | grep on
errexit_on=$?
# Set errexit "off".
set +o errexit
sudo=
# Use sudo, if it is supported.
sudo date > /dev/null 2>&1
if [ $? -eq 0 ]; then
sudo=sudo
fi
# Set errexit "on", if previously enabled.
if [ $errexit_on -eq 0 ]; then
set -o errexit
fi
- key: mongo_binaries
value: ${project}/${build_variant}/${revision}/binaries/mongo-${build_id}.${ext|tgz}
- key: mongo_cryptd
value: ${project}/${build_variant}/${revision}/binaries/mongo-cryptd-${build_id}.${ext|tgz}
- key: mongo_cryptd_debugsymbols
value: ${project}/${build_variant}/${revision}/binaries/mongo-cryptd-debugsymbols-${build_id}.${ext|tgz}
- key: mh_archive
value: ${project}/${build_variant}/${revision}/binaries/mh-${build_id}.${ext|tgz}
- key: mh_debugsymbols
value: ${project}/${build_variant}/${revision}/debugsymbols/mh-debugsymbols-${build_id}.${ext|tgz}
- key: mongo_debugsymbols
value: ${project}/${build_variant}/${revision}/debugsymbols/debugsymbols-${build_id}.${ext|tgz}
- key: mongo_shell
value: ${project}/${build_variant}/${revision}/binaries/mongo-shell-${build_id}.${ext|tgz}
- key: mongo_shell_debugsymbols
value: ${project}/${build_variant}/${revision}/binaries/mongo-shell-debugsymbols-${build_id}.${ext|tgz}
- key: skip_tests
value: skip_test-${build_id}
"set up virtualenv": &set_up_virtualenv
command: shell.exec
params:
shell: bash
script: |
# exit immediately if virtualenv is not found
set -o errexit
virtualenv_loc=$(which ${virtualenv|virtualenv})
python_loc=$(which ${python|/opt/mongodbtoolchain/v3/bin/python3})
venv_dir="${workdir}/venv"
if command -V cygpath; then
# Sad note: We have to use the Windows path instead of the posix path here.
# Otherwise, virtualenv may mistakenly resolve paths relative to c:\cygdrive.
# Creating a virtualenv in cygwin with 'python -m venv', will not work correctly
# since the paths will be wrong. We need to use virtualenv and specify the
# python path as 'C:\python\python_version'
python_loc=$(cygpath -w $python_loc)
venv_dir="$(cygpath -w "$venv_dir")"
"$virtualenv_loc" --python "$python_loc" --system-site-packages "$venv_dir"
else
"$python_loc" -m venv --system-site-packages "$venv_dir"
fi
export VIRTUAL_ENV_DISABLE_PROMPT=yes
# Not all git get project calls clone into ${workdir}/src so we allow
# callers to tell us where the pip requirements files are.
pip_dir="${pip_dir}"
if [[ -z $pip_dir ]]; then
# Default to most common location
pip_dir="${workdir}/src/etc/pip"
fi
# Same as above we have to use quotes to preserve the
# Windows path separator
toolchain_txt="$pip_dir/toolchain-requirements.txt"
${activate_virtualenv}
python -m pip install -r "$toolchain_txt" -q
python -m pip freeze > pip-requirements.txt
"upload pip requirements": &upload_pip_requirements
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: pip-requirements.txt
remote_file: ${project}/${build_variant}/${revision}/pip-requirements-${task_id}-${execution}.txt
bucket: mciuploads
permissions: public-read
content_type: atext-plain
display_name: Pip Requirements
"send benchmark results":
command: json.send
params:
name: perf
file: src/perf.json
"do setup":
- *fetch_artifacts
- *update_bypass_expansions
- *fetch_binaries
- *fetch_debugsymbols_archive
- *extract_binaries
- *set_task_expansion_macros
- *set_up_virtualenv
- *upload_pip_requirements
- *check_binary_version
- *get_buildnumber
- *set_up_credentials
- *run_diskstats
- *monitor_process_threads
- *collect_system_resource_info
"do benchmark setup":
- command: manifest.load
- *git_get_project
- *set_task_expansion_macros
- *set_up_virtualenv
- *upload_pip_requirements
- *update_bypass_expansions
- *get_buildnumber
- *set_up_credentials
- *fetch_benchmarks
"do multiversion setup": &do_multiversion_setup
command: shell.exec
params:
working_dir: src
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
rm -rf /data/install /data/multiversion
edition="${multiversion_edition|base}"
platform="${multiversion_platform|linux}"
architecture="${multiversion_architecture|x86_64}"
$python buildscripts/setup_multiversion_mongodb.py \
--installDir /data/install \
--linkDir /data/multiversion \
--edition $edition \
--platform $platform \
--architecture $architecture \
--useLatest 3.2 3.4 3.6 4.0
# The platform and architecture for how some of the binaries are reported in
# https://downloads.mongodb.org/full.json changed between MongoDB 4.0 and MongoDB 4.2.
# Certain build variants define additional multiversion_*_42_or_later expansions in order to
# be able to fetch a complete set of versions.
if [ ! -z "${multiversion_edition_42_or_later}" ]; then
edition="${multiversion_edition_42_or_later}"
fi
if [ ! -z "${multiversion_platform_42_or_later}" ]; then
platform="${multiversion_platform_42_or_later}"
fi
if [ ! -z "${multiversion_architecture_42_or_later}" ]; then
architecture="${multiversion_architecture_42_or_later}"
fi
$python buildscripts/setup_multiversion_mongodb.py \
--installDir /data/install \
--linkDir /data/multiversion \
--edition $edition \
--platform $platform \
--architecture $architecture \
--useLatest 4.2 4.2.1
# The platform and architecture for how some of the binaries are reported in
# https://downloads.mongodb.org/full.json changed between MongoDB 4.2 and MongoDB 4.4.
# Certain build variants define additional multiversion_*_44_or_later expansions in order to
# be able to fetch a complete set of versions.
if [ ! -z "${multiversion_edition_44_or_later}" ]; then
edition="${multiversion_edition_44_or_later}"
fi
if [ ! -z "${multiversion_platform_44_or_later}" ]; then
platform="${multiversion_platform_44_or_later}"
fi
if [ ! -z "${multiversion_architecture_44_or_later}" ]; then
architecture="${multiversion_architecture_44_or_later}"
fi
$python buildscripts/setup_multiversion_mongodb.py \
--installDir /data/install \
--linkDir /data/multiversion \
--edition $edition \
--platform $platform \
--architecture $architecture \
--useLatest 4.3
"execute resmoke tests": &execute_resmoke_tests
command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
if [[ ${disable_unit_tests|false} = "false" && ! -f ${skip_tests|/dev/null} ]]; then
# activate the virtualenv if it has been set up
${activate_virtualenv}
# Set the TMPDIR environment variable to be a directory in the task's working
# directory so that temporary files created by processes spawned by resmoke.py get
# cleaned up after the task completes. This also ensures the spawned processes
# aren't impacted by limited space in the mount point for the /tmp directory.
export TMPDIR="${workdir}/tmp"
mkdir -p $TMPDIR
if [ -f /proc/self/coredump_filter ]; then
# Set the shell process (and its children processes) to dump ELF headers (bit 4),
# anonymous shared mappings (bit 1), and anonymous private mappings (bit 0).
echo 0x13 > /proc/self/coredump_filter
if [ -f /sbin/sysctl ]; then
# Check that the core pattern is set explicitly on our distro image instead
# of being the OS's default value. This ensures that coredump names are consistent
# across distros and can be picked up by Evergreen.
core_pattern=$(/sbin/sysctl -n "kernel.core_pattern")
if [ "$core_pattern" = "dump_%e.%p.core" ]; then
echo "Enabling coredumps"
ulimit -c unlimited
fi
fi
fi
if [ $(uname -s) == "Darwin" ]; then
core_pattern_mac=$(/usr/sbin/sysctl -n "kern.corefile")
if [ "$core_pattern_mac" = "dump_%N.%P.core" ]; then
echo "Enabling coredumps"
ulimit -c unlimited
fi
fi
extra_args="$extra_args --jobs=${resmoke_jobs|1}"
if [ ${should_shuffle|true} = true ]; then
extra_args="$extra_args --shuffle"
fi
if [ ${continue_on_failure|true} = true ]; then
extra_args="$extra_args --continueOnFailure"
fi
# We reduce the storage engine's cache size to reduce the likelihood of a mongod process
# being killed by the OOM killer. The --storageEngineCacheSizeGB command line option is only
# filled in with a default value here if one hasn't already been specified in the task's
# definition or build variant's definition.
set +o errexit
echo "${resmoke_args} ${test_flags}" | grep -q storageEngineCacheSizeGB
if [ $? -eq 1 ]; then
echo "${resmoke_args} ${test_flags}" | grep -q "\-\-storageEngine=inMemory"
if [ $? -eq 0 ]; then
# We use a default of 4GB for the InMemory storage engine.
extra_args="$extra_args --storageEngineCacheSizeGB=4"
else
# We use a default of 1GB for all other storage engines.
extra_args="$extra_args --storageEngineCacheSizeGB=1"
fi
fi
set -o errexit
# Reduce the JSHeapLimit for the serial_run task task on Code Coverage builder variant.
if [[ "${build_variant}" = "enterprise-rhel-62-64-bit-coverage" && "${task_name}" = "serial_run" ]]; then
extra_args="$extra_args --mongodSetParameter {'jsHeapLimitMB':10}"
fi
path_value="$PATH"
if [ ${variant_path_suffix} ]; then
path_value="$path_value:${variant_path_suffix}"
fi
if [ ${task_path_suffix} ]; then
path_value="$path_value:${task_path_suffix}"
fi
# The "resmoke_wrapper" expansion is used by the 'burn_in_tests' task to wrap the resmoke.py
# invocation. It doesn't set any environment variables and should therefore come last in
# this list of expansions.
set +o errexit
PATH="$path_value" \
AWS_PROFILE=${aws_profile_remote} \
RSK_is_asan_build=${is_asan_build|""} \
${gcov_environment} \
${lang_environment} \
${san_options} \
${san_symbolizer} \
${snmp_config_path} \
${resmoke_wrapper} \
$python buildscripts/evergreen_run_tests.py \
${resmoke_args} \
$extra_args \
${test_flags} \
--log=buildlogger \
--staggerJobs=on \
--installDir=${install_dir|dist-test/bin} \
--buildId=${build_id} \
--distroId=${distro_id} \
--executionNumber=${execution} \
--projectName=${project} \
--gitRevision=${revision} \
--revisionOrderId=${revision_order_id} \
--taskId=${task_id} \
--taskName=${task_name} \
--variantName=${build_variant} \
--versionId=${version_id} \
--archiveFile=archive.json \
--reportFile=report.json \
--perfReportFile=perf.json
resmoke_exit_code=$?
set -o errexit
# 74 is exit code for IOError on POSIX systems, which is raised when the machine is
# shutting down.
#
# 75 is exit code resmoke.py uses when the log output would be incomplete due to failing
# to communicate with logkeeper.
if [[ $resmoke_exit_code = 74 || $resmoke_exit_code = 75 ]]; then
echo $resmoke_exit_code > run_tests_infrastructure_failure
exit 0
elif [ $resmoke_exit_code != 0 ]; then
# On failure save the resmoke exit code.
echo $resmoke_exit_code > resmoke_error_code
fi
exit $resmoke_exit_code
fi # end if [[ ${disable_unit_tests} && ! -f ${skip_tests|/dev/null} ]]
"retrieve generated test configuration": &retrieve_generated_test_configuration
command: s3.get
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
bucket: mciuploads
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${task}_gen-${build_id}.tgz
local_file: "generate_tasks_config.tgz"
"extract generated test configuration": &extract_generated_test_configuration
command: shell.exec
type: test
params:
shell: bash
script: |
set -o verbose
set -o errexit
target_dir="src/generated_resmoke_config"
mkdir -p $target_dir
mv generate_tasks_config.tgz $target_dir
cd $target_dir
tar xzf generate_tasks_config.tgz
"generate selected tests":
- command: expansions.write
params:
file: src/expansions.yml
- *configure_evergreen_api_credentials
- *configure_selected_tests_credentials
- command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
# Only run on master branch
if [[ "${project}" != "mongodb-mongo-master" ]]; then
if [ "${is_patch}" = "true" ]; then
echo "Not on master, skipping selected_tests_gen task"
exit 1
else
exit 0
fi
else
${activate_virtualenv}
PATH=$PATH:$HOME $python buildscripts/selected_tests.py --expansion-file expansions.yml --verbose --selected-tests-config .selected_tests.yml
fi
- command: archive.targz_pack
params:
target: generate_tasks_config.tgz
source_dir: src/selected_tests_config
include:
- "*"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: generate_tasks_config.tgz
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${task_name}-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Generated Task Config - Execution ${execution}
optional: true
- command: generate.tasks
params:
optional: true
files:
- src/selected_tests_config/*.json
"generate burn in tags":
- command: expansions.write
params:
file: src/expansions.yml
- *configure_evergreen_api_credentials
- command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
${activate_virtualenv}
PATH=$PATH:$HOME $python buildscripts/burn_in_tags.py --expansion-file expansions.yml
- command: archive.targz_pack
params:
target: burn_in_tags_gen.tgz
source_dir: src/generated_burn_in_tags_config
include:
- "*"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: burn_in_tags_gen.tgz
remote_file: ${project}/${build_variant}/${revision}/burn_in_tags_gen/burn_in_tags_gen-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Burn_in_tags Task Config - Execution ${execution}
- command: generate.tasks
params:
files:
- src/generated_burn_in_tags_config/burn_in_tags_gen.json
"generate randomized multiversion tasks":
- command: manifest.load
- *git_get_project
- *set_task_expansion_macros
- *set_up_virtualenv
- *upload_pip_requirements
- *configure_evergreen_api_credentials
- command: expansions.write
params:
file: src/expansions.yml
- command: shell.exec
type: test
params:
working_dir: src
script: |
set -o errexit
${activate_virtualenv}
$python buildscripts/evergreen_generate_resmoke_tasks.py --expansion-file expansions.yml --verbose
- *do_multiversion_setup
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
$python buildscripts/evergreen_gen_multiversion_tests.py generate-exclude-files --suite=${suite} --task-path-suffix=${use_multiversion} --is-generated-suite=true
- command: archive.targz_pack
params:
target: generate_tasks_config.tgz
source_dir: src/generated_resmoke_config
include:
- "*"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: generate_tasks_config.tgz
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${task_name}-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Generated Task Config - Execution ${execution}
optional: true
- command: generate.tasks
params:
optional: true
files:
- src/generated_resmoke_config/*.json
"generate resmoke tasks":
- command: manifest.load
- *git_get_project
- *set_task_expansion_macros
- *set_up_virtualenv
- *upload_pip_requirements
- *configure_evergreen_api_credentials
- command: expansions.write
params:
file: src/expansions.yml
- command: shell.exec
type: test
params:
working_dir: src
script: |
set -o errexit
${activate_virtualenv}
$python buildscripts/evergreen_generate_resmoke_tasks.py --expansion-file expansions.yml --verbose
- command: archive.targz_pack
params:
target: generate_tasks_config.tgz
source_dir: src/generated_resmoke_config
include:
- "*"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: generate_tasks_config.tgz
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${task_name}-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Generated Task Config - Execution ${execution}
optional: true
- command: generate.tasks
params:
optional: true
files:
- src/generated_resmoke_config/*.json
"run generated tests":
- *retrieve_generated_test_configuration
- *extract_generated_test_configuration
- command: expansions.update
params:
updates:
- key: aws_key_remote
value: ${mongodatafiles_aws_key}
- key: aws_profile_remote
value: mongodata_aws
- key: aws_secret_remote
value: ${mongodatafiles_aws_secret}
- *set_up_remote_credentials
- *determine_resmoke_jobs
- *update_resmoke_jobs_expansions
- *execute_resmoke_tests
# The existence of the "run_tests_infrastructure_failure" file indicates this failure isn't
# directly actionable. We use type=setup rather than type=system or type=test for this command
# because we don't intend for any human to look at this failure.
- command: shell.exec
type: setup
params:
working_dir: src
script: |
set -o verbose
if [ -f run_tests_infrastructure_failure ]; then
exit $(cat run_tests_infrastructure_failure)
fi
"run tests":
- *determine_task_timeout
- *update_task_timeout_expansions
- *update_task_timeout
- command: expansions.update
params:
updates:
- key: aws_key_remote
value: ${mongodatafiles_aws_key}
- key: aws_profile_remote
value: mongodata_aws
- key: aws_secret_remote
value: ${mongodatafiles_aws_secret}
- *set_up_remote_credentials
- *determine_resmoke_jobs
- *update_resmoke_jobs_expansions
- *execute_resmoke_tests
# The existence of the "run_tests_infrastructure_failure" file indicates this failure isn't
# directly actionable. We use type=setup rather than type=system or type=test for this command
# because we don't intend for any human to look at this failure.
- command: shell.exec
type: setup
params:
working_dir: src
script: |
set -o verbose
if [ -f run_tests_infrastructure_failure ]; then
exit $(cat run_tests_infrastructure_failure)
fi
"scons lint":
- command: shell.exec
type: test
params:
working_dir: src
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
export MYPY="$(
if command -V cygpath 2>/dev/null; then
PATH+=":$(cypath "${workdir}")/venv_3/Scripts"
else
PATH+=":${workdir}/venv_3/bin"
fi
PATH+=':/opt/mongodbtoolchain/v3/bin'
which mypy
)"
echo "Found mypy executable at '$MYPY'"
export extra_flags=""
if [[ ${is_patch|false} == "true" ]]; then
extra_flags="--lint-scope=changed"
fi
${compile_env|} python3 ./buildscripts/scons.py ${compile_flags|} $extra_flags --stack-size=1024 GITDIFFFLAGS="${revision}" REVISION="${revision|}" ENTERPRISE_REV="${enterprise_rev|}" ${targets}
"scons compile":
command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
if [ "${is_patch}" = "true" ] && [ "${bypass_compile|false}" = "true" ]; then
exit 0
fi
rm -rf ${install_directory|/data/mongo-install-directory}
# Only allow hygienic builds in Evergreen
extra_args="--install-mode=hygienic"
if [ -n "${num_scons_link_jobs_available|}" ]; then
echo "Changing SCons to run with --jlink=${num_scons_link_jobs_available|}"
extra_args="$extra_args --jlink=${num_scons_link_jobs_available|}"
fi
if [ "${scons_cache_scope|}" = "shared" ]; then
extra_args="$extra_args --cache-debug=scons_cache.log"
fi
# Enable performance debugging
extra_args="$extra_args --debug=time"
# Build packages where the upload tasks expect them
if [ -n "${git_project_directory|}" ]; then
extra_args="$extra_args PKGDIR=${git_project_directory}"
else
extra_args="$extra_args PKGDIR=${workdir}/src"
fi
# If we are doing a patch build or we are building a non-push
# build on the waterfall, then we don't need the --release
# flag. Otherwise, this is potentially a build that "leaves
# the building", so we do want that flag. The non --release
# case should auto enale the faster decider when
# applicable. Furthermore, for the non --release cases we can
# accelerate the build slightly for situations where we invoke
# SCons multiple times on the same machine by allowing SCons
# to assume that implicit dependencies are cacheable across
# runs.
if [ "${is_patch|false}" = "true" ] || [ -z "${push_bucket|}" ] || [ "${compiling_for_test|false}" = "true" ]; then
extra_args="$extra_args --implicit-cache --build-fast-and-loose=on"
else
extra_args="$extra_args --release"
fi
${activate_virtualenv}
${compile_env|} $python ./buildscripts/scons.py \
${compile_flags|} ${task_compile_flags|} ${task_compile_flags_extra|} \
${scons_cache_args|} $extra_args \
${targets} MONGO_VERSION=${version} || exit_status=$?
# If compile fails we do not run any tests
if [[ $exit_status -ne 0 ]]; then
if [[ "${dump_scons_config_on_failure}" == true ]]; then
echo "Dumping build/scons/config.log"
cat build/scons/config.log
fi
touch ${skip_tests}
fi
exit $exit_status
"generate compile expansions":
command: shell.exec
params:
working_dir: src
script: |
set -o errexit
set -o verbose
# We get the raw version string (r1.2.3-45-gabcdef) from git
MONGO_VERSION=$(git describe --abbrev=7)
# If this is a patch build, we add the patch version id to the version string so we know
# this build was a patch, and which evergreen task it came from
if [ "${is_patch}" = "true" ] && [ "${bypass_compile|false}" = "false" ]; then
MONGO_VERSION="$MONGO_VERSION-patch-${version_id}"
fi
${activate_virtualenv}
# shared scons cache testing
# if 'scons_cache_scope' enabled and project level 'disable_shared_scons_cache' is not true
# 'scons_cache_scope' is set on a per variant basis
# 'disable_shared_scons_cache' is set on a project level and applies to all variants
# Shared - if scons_cache_scope is set, then use new shared scons cache settings
if [ ! -z ${scons_cache_scope} ]; then
if [ "${disable_shared_scons_cache}" = "true" ]; then
echo "SCons Cache disabled. All shared scons settings will be ignored"
scons_cache_scope=none
else
scons_cache_scope=${scons_cache_scope}
fi
if [ "$scons_cache_scope" = "shared" ]; then
set +o errexit
if [ "Windows_NT" = "$OS" ]; then
./win_mount.sh
else
mount | grep "\/efs" > /dev/null
if [ $? -eq 0 ]; then
echo "Shared cache is already mounted"
else
echo "Shared cache - mounting file system"
${set_sudo}
$sudo mount /efs
fi
fi
set -o errexit
fi
echo "Shared Cache with setting: ${scons_cache_scope}"
MONGO_VERSION=$MONGO_VERSION SCONS_CACHE_MODE=${scons_cache_mode|nolinked} SCONS_CACHE_SCOPE=$scons_cache_scope IS_PATCH=${is_patch} IS_COMMIT_QUEUE=${is_commit_queue|false} $python buildscripts/generate_compile_expansions_shared_cache.py --out compile_expansions.yml
# Legacy Expansion generation
else
echo "Using legacy expansion generation"
# Proceed with regular expansions generated
# This script converts the generated version string into a sanitized version string for
# use by scons and uploading artifacts as well as information about for the scons cache.
MONGO_VERSION=$MONGO_VERSION SCONS_CACHE_MODE=${scons_cache_mode|nolinked} USE_SCONS_CACHE=${use_scons_cache|false} $python buildscripts/generate_compile_expansions.py --out compile_expansions.yml
fi
"apply compile expansions":
command: expansions.update
params:
file: src/compile_expansions.yml
"do jepsen setup":
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
# Build libfaketime. A version of libfaketime at least as new as v0.9.6-9-g75896bd is
# required to use the FAKETIME_NO_CACHE and FAKETIME_TIMESTAMP_FILE environment variables.
# Additionally, a version of libfaketime containing the changes mentioned in SERVER-29336
# is required to avoid needing to use libfaketimeMT.so.1 and serializing all calls to
# fake_clock_gettime() with a mutex.
git clone --branch=for-jepsen --depth=1 git@github.com:10gen/libfaketime.git
cd libfaketime
branch=$(git symbolic-ref --short HEAD)
commit=$(git show -s --pretty=format:"%h - %an, %ar: %s")
echo "Git branch: $branch, commit: $commit"
make PREFIX=$(pwd)/build/ LIBDIRNAME='.' install
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
git clone --branch=jepsen-mongodb-master --depth=1 git@github.com:10gen/jepsen.git jepsen-mongodb
cd jepsen-mongodb
branch=$(git symbolic-ref --short HEAD)
commit=$(git show -s --pretty=format:"%h - %an, %ar: %s")
echo "Git branch: $branch, commit: $commit"
lein install
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
${activate_virtualenv}
$python -c 'import socket; num_nodes = 5; print("\n".join(["%s:%d" % (socket.gethostname(), port) for port in range(20000, 20000 + num_nodes)]))' > nodes.txt
# Runs a single Jepsen linearizability test with leiningen, then greps the test
# log to classify the outcome: pass, analysis failure, or (anything else) a
# setup/system failure. Writes a resmoke-style report.json for Evergreen.
"run jepsen test":
  - command: shell.exec
    type: test
    timeout_secs: 2700 # Timeout test if there is no output for more than 45 minutes.
    params:
      working_dir: src/jepsen-mongodb
      script: |
        set -o verbose
        # Set the TMPDIR environment variable to be a directory in the task's working
        # directory so that temporary files created by processes spawned by jepsen get
        # cleaned up after the task completes. This also ensures the spawned processes
        # aren't impacted by limited space in the mount point for the /tmp directory.
        # We also need to set the _JAVA_OPTIONS environment variable so that lein will
        # recognize this as the default temp directory.
        export TMPDIR="${workdir}/tmp"
        mkdir -p $TMPDIR
        export _JAVA_OPTIONS=-Djava.io.tmpdir=$TMPDIR

        start_time=$(date +%s)
        # The jepsen_* expansions below are optional per-variant knobs; empty
        # expansions simply drop the corresponding flag.
        lein run test --test ${jepsen_test_name} \
          --mongodb-dir ../ \
          --working-dir ${workdir}/src/jepsen-workdir \
          --clock-skew faketime \
          --libfaketime-path ${workdir}/src/libfaketime/build/libfaketime.so.1 \
          --mongod-conf mongod_verbose.conf \
          --virtualization none \
          --nodes-file ../nodes.txt \
          ${jepsen_key_time_limit} \
          ${jepsen_protocol_version} \
          ${jepsen_read_concern} \
          ${jepsen_read_with_find_and_modify} \
          ${jepsen_storage_engine} \
          ${jepsen_time_limit} \
          ${jepsen_write_concern} \
          2>&1 \
          | tee jepsen_${task_name}_${execution}.log
        end_time=$(date +%s)
        elapsed_secs=$((end_time-start_time))

        # Since we cannot use PIPESTATUS to get the exit code from the "lein run ..." pipe in dash shell,
        # we will check the output for success, failure or setup error. Note that 'grep' returns with exit code
        # 0 if it finds a match, and exit code 1 if no match is found.
        grep -q "Everything looks good" jepsen_${task_name}_${execution}.log
        grep_exit_code=$?
        if [ $grep_exit_code -eq 0 ]; then
          status='"pass"'
          failures=0
          final_exit_code=0
        else
          grep -q "Analysis invalid" jepsen_${task_name}_${execution}.log
          grep_exit_code=$?
          if [ $grep_exit_code -eq 0 ]; then
            status='"fail"'
            failures=1
            final_exit_code=1
          else
            # If the failure is due to setup, then this is considered a system failure.
            # Record the marker file and exit 0 here; the follow-up command below turns
            # it into a non-test (system) failure.
            echo $grep_exit_code > jepsen_system_failure_${task_name}_${execution}
            exit 0
          fi
        fi
        # Create report.json
        echo "{\"failures\": $failures, \"results\": [{\"status\": $status, \"exit_code\": $final_exit_code, \"test_file\": \"${task_name}\", \"start\": $start_time, \"end\": $end_time, \"elapsed\": $elapsed_secs}]}" > ../report.json
        exit $final_exit_code
  - command: shell.exec
    params:
      working_dir: src/jepsen-mongodb
      script: |
        set -o verbose
        # Jepsen system failure if file exists.
        if [ -f jepsen_system_failure_${task_name}_${execution} ]; then
          exit $(cat jepsen_system_failure_${task_name}_${execution})
        fi
# Appends the AWS KMS credentials needed by the enterprise FLE jstests to
# aws_secrets.js. 'silent: true' keeps the secret values out of the task log.
"load aws test credentials":
  - command: shell.exec
    params:
      silent: true
      working_dir: src
      script: |
        set -o errexit
        echo "const AWS_KMS_SECRET_ID = '${aws_kms_access_key_id}';" >> src/mongo/db/modules/enterprise/jstests/fle/lib/aws_secrets.js
        echo "const AWS_KMS_SECRET_KEY = '${aws_kms_secret_access_key}';" >> src/mongo/db/modules/enterprise/jstests/fle/lib/aws_secrets.js
# Generates multiversion test sub-tasks: runs the generator script to produce
# resmoke task configs plus exclusion files, archives/uploads the generated
# config for debugging, then hands it to Evergreen's generate.tasks.
"generate multiversion tasks":
  - command: manifest.load
  - *git_get_project
  - *set_task_expansion_macros
  - *set_up_virtualenv
  - *configure_evergreen_api_credentials
  - command: expansions.write
    params:
      file: src/expansions.yml
  - *do_multiversion_setup
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        # Emits the generated task JSON into src/generated_resmoke_config.
        $python buildscripts/evergreen_gen_multiversion_tests.py run --expansion-file expansions.yml
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        $python buildscripts/evergreen_gen_multiversion_tests.py generate-exclude-files --suite=${task_name} --task-path-suffix=${task_path_suffix} --is-generated-suite=true
  - command: archive.targz_pack
    params:
      target: generate_tasks_config.tgz
      source_dir: src/generated_resmoke_config
      include:
        - "*"
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: generate_tasks_config.tgz
      remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${task_name}-${build_id}.tgz
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}
      display_name: Generated Task Config - Execution ${execution}
  - command: generate.tasks
    params:
      files:
        - src/generated_resmoke_config/*.json
# Generates fuzzer sub-tasks: runs the fuzzer task generator against the task's
# expansions, archives/uploads the generated config, then calls generate.tasks
# on the single ${name}.json it produced.
"generate fuzzer tasks":
  - command: manifest.load
  - *git_get_project
  - *set_task_expansion_macros
  - *set_up_virtualenv
  - *upload_pip_requirements
  - command: expansions.write
    params:
      file: src/expansions.yml
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        $python buildscripts/evergreen_gen_fuzzer_tests.py --expansion-file expansions.yml
  - command: archive.targz_pack
    params:
      target: generate_tasks_config.tgz
      source_dir: src/generated_resmoke_config
      include:
        - "*"
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: generate_tasks_config.tgz
      remote_file: ${project}/${build_variant}/${revision}/generate_tasks/${name}_gen-${build_id}.tgz
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}
      display_name: Generated Task Config - Execution ${execution}
  - command: generate.tasks
    params:
      files:
        - src/generated_resmoke_config/${name}.json
# Clones the private jstestfuzz repo into src/jstestfuzz and installs/builds
# its npm dependencies. Requires node on PATH (via ${add_nodejs_to_path}).
"setup jstestfuzz":
  - command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        set -o errexit
        set -o verbose
        ${add_nodejs_to_path}
        git clone git@github.com:10gen/jstestfuzz.git
        pushd jstestfuzz
        npm install
        npm run prepare
        popd
# Sanity-checks only the .js files touched by the patch: feeds them through the
# jstestfuzz parser to catch files the fuzzer cannot parse.
"lint fuzzer sanity patch":
  - command: shell.exec
    type: test
    params:
      working_dir: src
      shell: bash
      script: |
        set -eo pipefail
        set -o verbose
        ${add_nodejs_to_path}
        # Run parse-jsfiles on 50 files at a time with 32 processes in parallel.
        # Grep returns 1 if it fails to find a match.
        # NOTE(review): assumes modified_and_created_patch_files.txt was written
        # by an earlier function — confirm against the task definition.
        (grep "\.js$" modified_and_created_patch_files.txt || true) | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles --
# Sanity-checks every .js file in jstests/ and the enterprise module by feeding
# them through the jstestfuzz parser.
"lint fuzzer sanity all":
  - command: shell.exec
    type: test
    params:
      working_dir: src
      shell: bash
      script: |
        set -eo pipefail
        set -o verbose
        ${add_nodejs_to_path}
        # Run parse-jsfiles on 50 files at a time with 32 processes in parallel.
        find "$PWD/jstests" "$PWD/src/mongo/db/modules/enterprise" -name "*.js" -print | xargs -P 32 -L 50 npm run --prefix jstestfuzz parse-jsfiles --
# Runs the fuzzer to generate test files: clones the enterprise and QA test
# corpora as fuzzer seed input, invokes the configured npm fuzzer command, and
# uploads the generated out/*.js tests to S3 for later download.
"run jstestfuzz":
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        git clone --depth 1 git@github.com:10gen/mongo-enterprise-modules.git jstests/enterprise_tests
        git clone --depth 1 git@github.com:10gen/QA.git jstests/qa_tests
  - command: shell.exec
    type: test
    params:
      working_dir: src/jstestfuzz
      script: |
        set -o errexit
        set -o verbose
        ${add_nodejs_to_path}
        # npm_command defaults to 'jstestfuzz'; jstestfuzz_vars carries
        # per-task fuzzer options.
        npm run ${npm_command|jstestfuzz} -- ${jstestfuzz_vars} --branch ${branch_name}
  - command: archive.targz_pack
    params:
      target: "jstests.tgz"
      source_dir: "src/jstestfuzz"
      include:
        - "out/*.js"
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: jstests.tgz
      remote_file: ${project}/${build_variant}/${revision}/jstestfuzz/${task_id}-${execution}.tgz
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}
      display_name: Generated Tests - Execution ${execution}
# Runs the IDL compiler's own unit test suite.
"run idl tests":
  - command: shell.exec
    type: test
    params:
      working_dir: src
      script: |
        set -o verbose
        set -o errexit
        ${activate_virtualenv}
        $python buildscripts/idl/run_tests.py
# Drives a powercycle test against the remote EC2 host via powertest.py.
# Flow: build the powertest.py option set (with Windows-specific overrides),
# pre-seed a failing report.json that powertest.py finalizes, run the test with
# an EXIT trap that stashes the real exit status, then surface ssh failures as
# system failures and exit with the saved status in a final 'type: test' step.
"run powercycle test":
  - command: shell.exec
    type: test
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        ${set_sudo}
        if [ ! -z $sudo ]; then
          remote_sudo="--remoteSudo"
        fi
        ${activate_virtualenv}
        # The virtualenv bin_dir is different for Linux and Windows
        bin_dir=$(find $VIRTUAL_ENV -name activate | sed -e "s,$VIRTUAL_ENV,,;s,activate,,;s,/,,g")
        cmds=". ${virtualenv_dir|venv}/$bin_dir/activate"
        cmds="$cmds; python -u"
        # The remote python operates in a virtualenv
        remote_python="--remotePython=\"$cmds\""

        # Initialize report.json. The report will be finalized by powertest.py.
        # Seeded as a failure so an aborted run still reports something sensible.
        report_json="report.json"
        start_time=$(date +%s)
        status="\"fail\""
        failures=0
        exit_code=1
        end_time=$start_time
        elapsed_secs=0
        echo "{\"failures\": $failures, \"results\": [{\"status\": $status, \"exit_code\": $exit_code, \"test_file\": \"${task_name}\", \"start\": $start_time, \"end\": $end_time, \"elapsed\": $elapsed_secs}]}" > $report_json
        generate_report_json="--reportJsonFile=$report_json"

        # Windows task overrides:
        #   - Execute 10 test loops
        #   - Cap the maximum number of clients to 10 each
        if [ "Windows_NT" = "$OS" ]; then
          test_override=--testLoops=10
          max_clients=10
          # Rewrite each --numCrudClients/--numFsmClients value in
          # ${client_options} to min(value, $max_clients).
          for client in --numCrudClients --numFsmClients
          do
            override=$(echo ${client_options} | awk "BEGIN {FS=\" |=\"} {for(j=1;j<=NF;j++) if (\$j~/^$client/) {min=(\$(j+1) < $max_clients) ? \$(j+1) : $max_clients; printf \"%s=%d\", \$j,min}}")
            client_override="$client_override $override"
          done
        fi

        # Set an exit trap so we can save the real exit status (see SERVER-34033).
        trap 'echo $? > error_exit.txt; exit 0' EXIT

        # First invocation only saves the merged option set to $config_file;
        # the second invocation actually runs the test from that config.
        config_file=powertest.yml
        eval $python pytests/powertest.py \
          "--saveConfigOptions=$config_file \
          ${connection_options} \
          ${program_options} \
          $generate_report_json \
          $remote_sudo \
          $remote_python \
          ${test_options} \
          $test_override \
          ${crash_options} \
          ${client_options} \
          $client_override \
          ${mongodb_options} \
          ${mongod_options} \
          ${mongod_extra_options}"
        set +o errexit
        $python -u pytests/powertest.py --configFile=$config_file
  - command: expansions.update
    params:
      ignore_missing_file: true
      # ${exit_file} is written by powertest.py and carries exit_code /
      # ec2_ssh_failure expansions consumed below.
      file: src/${exit_file}
  - command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        # Trigger a system failure if powertest.py failed due to ssh access.
        if [ -n "${ec2_ssh_failure}" ]; then
          echo "ec2_ssh_failure detected - $(cat ${exit_file})"
          exit ${exit_code}
        fi
  - command: shell.exec
    params:
      working_dir: src
      shell: bash
      script: |
        if [ ! -f report.json ]; then
          exit 0
        fi
        grep -q "pass" report.json
        pass=$?
        # On test success, we only archive mongod.log.
        if [ $pass -eq 0 ]; then
          echo "ec2_artifacts: ${log_path}" > ec2_artifacts.yml
        fi
  - command: expansions.update
    params:
      ignore_missing_file: true
      file: src/ec2_artifacts.yml
  - command: shell.exec
    type: test
    params:
      shell: bash
      script: |
        # Test exits from here with specified exit_code.
        if [ -n "${exit_code}" ]; then
          # Python program saved exit_code
          exit_code=${exit_code}
        elif [ -f error_exit.txt ]; then
          # Bash trap exit_code
          exit_code=$(cat error_exit.txt)
        else
          exit_code=0
        fi
        echo "Exiting powercycle with code $exit_code"
        exit $exit_code
# Builds distro packages (deb/rpm) from mongodb-dist.tgz via the packager
# script. Skipped for patch builds that bypassed compile, or for variants
# without packages.
"run packager.py":
  command: shell.exec
  params:
    working_dir: src
    script: |
      set -o errexit
      set -o verbose

      if [ "${is_patch}" = "true" ] && [ "${bypass_compile|false}" = "true" ]; then
        exit 0
      fi
      ${activate_virtualenv}
      if [ "${has_packages|}" = "true" ] ; then
        cd buildscripts
        $python ${packager_script} --prefix `pwd`/.. --distros ${packager_distro} --tarball `pwd`/../mongodb-dist.tgz -s ${version} -m HEAD -a ${packager_arch}
        cd ..
      fi
# Stages the enterprise SNMP mongod config into snmpconf/ for the SNMP tests.
"do snmp setup":
  command: shell.exec
  params:
    working_dir: src
    script: |
      set -o errexit
      set -o verbose
      mkdir -p snmpconf
      cp -f src/mongo/db/modules/enterprise/docs/mongod.conf.master snmpconf/mongod.conf
# Installs charybdefs (fault-injecting FUSE filesystem) used by the storage
# watchdog tests.
"do watchdog setup":
  command: shell.exec
  params:
    working_dir: src
    script: |
      set -o errexit
      set -o verbose
      bash jstests/watchdog/charybdefs_setup.sh
# Removes task work products (source tree, dbpath data, archives, cloud
# credentials, virtualenv) so the host is clean for the next task. Always
# exits 0 — cleanup is best-effort.
"cleanup environment":
  command: shell.exec
  params:
    script: |
      set -o verbose
      rm -rf src /data/db/* mongo-diskstats* mongo-*.tgz ~/.aws ~/.boto venv
      exit 0
# Kills leftover test processes (mongo*, java, python, *_test, ...) on the
# host, excluding system daemons and the Evergreen agent. Handles both Windows
# (tasklist/taskkill) and Unix (pgrep/kill -9). Always exits 0.
"kill processes":
  command: shell.exec
  params:
    silent: true
    script: |
      process_kill_list="(^cl\.exe$|java|lein|lldb|mongo|python|_test$|_test\.exe$)"
      # Exclude Evergreen agent processes and other system daemons
      process_exclude_list="(main|tuned|evergreen)"

      if [ "Windows_NT" = "$OS" ]; then
        # Get the list of Windows tasks (tasklist list format):
        # - Transpose the Image Name and PID
        # - The first column has the process ID
        # - The second column (and beyond) has task name
        # - Grep for the task names of interest while ignoring any names that are in the exclude list
        processes=$(tasklist /fo:csv | awk -F'","' '{x=$1; gsub("\"","",x); print $2, x}' | grep -iE "$process_kill_list" | grep -ivE "$process_exclude_list")
        # Kill the Windows process by process ID with force (/f)
        kill_process () { pid=$(echo $1 | cut -f1 -d ' '); echo "Killing process $1"; taskkill /pid "$pid" /f; }
      else
        # Get the list of Unix tasks (pgrep full & long):
        # - Grep for the task names of interest while ignoring any names that are in the exclude list
        # - The first column has the process ID
        # - The second column (and beyond) has task name
        # There are 2 "styles" of pgrep, figure out which one works.
        # Due to https://bugs.launchpad.net/ubuntu/+source/procps/+bug/1501916
        # we cannot rely on the return status ($?) to detect if the option is supported.
        pgrep -f --list-full ".*" 2>&1 | grep -qE "(illegal|invalid|unrecognized) option"
        if [ $? -ne 0 ]; then
          pgrep_list=$(pgrep -f --list-full "$process_kill_list")
        else
          pgrep_list=$(pgrep -f -l "$process_kill_list")
        fi

        # Since a process name might have a CR or LF in it, we need to delete any lines from
        # pgrep which do not start with space(s) and 1 digit and trim any leading spaces.
        processes=$(echo "$pgrep_list" | grep -ivE "$process_exclude_list" | sed -e '/^ *[0-9]/!d; s/^ *//; s/[[:cntrl:]]//g;')
        # Kill the Unix process ID with signal KILL (9)
        kill_process () { pid=$(echo $1 | cut -f1 -d ' '); echo "Killing process $1"; kill -9 $pid; }
      fi
      # Since a full process name can have spaces, the IFS (internal field separator)
      # should not include a space, just a LF & CR
      IFS=$(printf "\n\r")
      for process in $processes
      do
        kill_process "$process"
      done
      exit 0
# Runs the package smoke tests with Test Kitchen against the uploaded packages
# tarball. Retries 'kitchen verify' up to 3 times (destroying the instance
# between attempts) and always destroys the instance before reporting.
"run kitchen":
  command: shell.exec
  type: test
  params:
    shell: bash
    working_dir: src/buildscripts/package_test
    script: |
      set -o errexit
      export KITCHEN_ARTIFACTS_URL="https://s3.amazonaws.com/mciuploads/${project}/${build_variant}/${revision}/artifacts/${build_id}-packages.tgz"
      export KITCHEN_SECURITY_GROUP="${kitchen_security_group}"
      export KITCHEN_SSH_KEY_ID="${kitchen_ssh_key_id}"
      export KITCHEN_SUBNET="${kitchen_subnet}"
      export KITCHEN_VPC="${kitchen_vpc}"

      ${activate_virtualenv}

      # set expiration tag 2 hours in the future, since no test should take this long
      export KITCHEN_EXPIRE="$($python -c 'import datetime; print((datetime.datetime.utcnow() + datetime.timedelta(hours=2)).strftime("%Y-%m-%d %H:%M:%S"))')"

      for i in {1..3}
      do
        # kitchen commands use regex so 'kitchen verify amazon' matches both amazon and amazon2
        # that's why we pass $ at the end of "${packager_distro}"
        if ! kitchen verify "${packager_distro}"\$; then
          verified="false"
          kitchen destroy "${packager_distro}"\$ || true
          sleep 30
        else
          verified="true"
          break
        fi
      done

      kitchen destroy "${packager_distro}"\$ || true
      # Fail the task unless some attempt verified successfully.
      test "$verified" = "true"
# Background loop (runs for the life of the task) that every 30s tars the
# monitor files on the remote EC2 instance and copies the archive back locally.
# Restarts the instance via '--mode start' each pass in case it was power-cycled,
# which can also change its private IP.
"copy ec2 monitor files": &copy_ec2_monitor_files
  command: shell.exec
  params:
    background: true
    system_log: true
    working_dir: src
    silent: false
    script: |
      while [ 1 ]
      do
        # Tar/zip monitor files on remote host.
        if [ -z "${ec2_monitor_files}" ] || [ -z "${instance_id}" ]; then
          exit 0
        fi
        # Ensure we use the latest private_ip_address, as it could change if the EC2 instance
        # has been stopped and started.
        ${activate_virtualenv}
        # Specify '--mode start' to ensure the remote instance is running.
        monitor_ec2_yml=monitor_ec2.yml
        $python buildscripts/aws_ec2.py --imageId ${instance_id} --mode start --yamlFile $monitor_ec2_yml
        echo "AMI EC2 instance ${instance_id} status: $(cat $monitor_ec2_yml)"
        private_ip_address=$($python buildscripts/yaml_key_value.py --yamlFile $monitor_ec2_yml --yamlKey private_ip_address)
        if [ -z "$private_ip_address" ]; then
          echo "Cannot determine the IP address for the remote monitor."
          continue
        fi
        cmd="${tar|tar} czf ec2_monitor_files.tgz ${ec2_monitor_files}"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ssh_connection_options="$ssh_connection_options -o ConnectionAttempts=3"
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@$private_ip_address \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmd"
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@$private_ip_address \
          --operation "copy_from" \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --file ec2_monitor_files.tgz
        sleep 30
      done
# Provisions a clone of the current Evergreen host in EC2 (used for powercycle
# testing). This first command launches the instance; the aws_ec2_yml output
# file it writes provides expansions (instance_id, private_ip_address, ...)
# consumed by the following commands.
"set up EC2 instance": &set_up_ec2_instance
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        ${activate_virtualenv}
        # Build the launch script's flag list from optional expansions:
        # subnet (-n), security group ids (-g), security group names (-s).
        if [ ! -z "${subnet_id}" ]; then
          subnet_id="-n ${subnet_id}"
        fi
        for security_group_id in ${security_group_ids}
        do
          security_group_ids="$security_group_ids -g $security_group_id"
        done
        if [ -z "${security_group_ids}" ]; then
          for security_group in ${security_groups}
          do
            security_groups="$security_groups -s $security_group"
          done
        fi
        if [ -n "${ec2_expire_hours}" ]; then
          expire_hours="-e ${ec2_expire_hours}"
          # Since Windows hosts are expensive to keep running we'll expire it after 3 hours.
          if [ "Windows_NT" = "$OS" ]; then
            expire_hours="-e 3"
          fi
        fi
        # Clone another instance of this host in EC2.
        buildscripts/launch_evergreen_ec2_instance.sh \
          $expire_hours \
          -k ${ssh_key_id} \
          $security_groups \
          $security_group_ids \
          $subnet_id \
          -t "AMI Evergreen ${task_id}" \
          -y ${aws_ec2_yml}
  # Load the launch output (instance_id, private_ip_address, ...) as expansions.
  - command: expansions.update
    params:
      file: src/${aws_ec2_yml}
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        # Copy mount_drives.sh script to remote host.
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --operation "copy_to" \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --file buildscripts/mount_drives.sh
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        # Mount /data on the attached drive(s), more than 1 indicates a RAID set.
        ${set_sudo}
        # Assemble mount_drives.sh options from the optional drive expansions.
        script_opts="-d '${data_device_names}'"
        if [ ! -z "${raid_data_device_name}" ]; then
          script_opts="$script_opts -r ${raid_data_device_name}"
        fi
        if [ ! -z "${fstype}" ]; then
          script_opts="$script_opts -t ${fstype}"
        fi
        if [ ! -z "${fs_options}" ]; then
          script_opts="$script_opts -o '${fs_options}'"
        fi
        # Mount /log on the attached drive.
        if [ ! -z "${log_device_name}" ]; then
          script_opts="$script_opts -l '${log_device_name}'"
          log="/log"
        fi
        group=$(id -Gn $USER | cut -f1 -d ' ') || true
        user_group="$USER:$group"
        script_opts="$script_opts -u $user_group"
        data_db=/data/db
        # Run the mount on the remote host, then list mounts for the task log.
        cmds="$sudo bash mount_drives.sh $script_opts; mount; ls -ld $data_db $log; df"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        # Create remote_dir, if specified as expansion macro and is not '.' (pwd).
        if [[ -z "${remote_dir|}" || ${remote_dir} == "." ]]; then
          exit 0
        fi
        ${set_sudo}
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        group=$(id -Gn $USER | cut -f1 -d ' ') || true
        user_group="$USER:$group"
        # On Windows (Cygwin) chmod does not work; use setfacl instead.
        set_permission="chmod 777 ${remote_dir}"
        if [ "Windows_NT" = "$OS" ]; then
          set_permission="setfacl -s user::rwx,group::rwx,other::rwx ${remote_dir}"
        fi
        cmds="$sudo mkdir -p ${remote_dir}; $sudo chown $user_group ${remote_dir}; $set_permission; ls -ld ${remote_dir}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        # Copy buildscripts, pytests and mongoDB executables to the remote host.
        file_param="--file etc --file buildscripts --file pytests"
        mongo_executables="mongo mongod mongos"
        for executable in $mongo_executables
        do
          # ${exe} is '.exe' on Windows, empty elsewhere.
          file_param="$file_param --file $executable${exe}"
        done
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --operation "copy_to" \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          $file_param \
          --remoteDir ${remote_dir}
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        # Set up virtualenv on remote.
        # Note the escaped \$ / \" pieces: this string is evaluated on the
        # remote host, not locally.
        cmds="python_loc=\$(which \${python|/opt/mongodbtoolchain/v3/bin/python3})"
        cmds="$cmds; remote_dir=${remote_dir|.}"
        cmds="$cmds; if [ \"Windows_NT\" = \"$OS\" ]; then python_loc=\$(cygpath -w \$python_loc); remote_dir=\$(cygpath -w \$remote_dir); fi"
        cmds="$cmds; virtualenv --python \$python_loc --system-site-packages ${virtualenv_dir|venv}"
        cmds="$cmds; activate=\$(find ${virtualenv_dir|venv} -name 'activate')"
        cmds="$cmds; . \$activate"
        cmds="$cmds; pip3 install -r \$remote_dir/etc/pip/powercycle-requirements.txt"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        if [ "Windows_NT" = "$OS" ]; then
          exit 0
        fi
        # Enable core dumps on non-Windows remote hosts.
        # The core pattern must specify a directory, since mongod --fork will chdir("/")
        # and cannot generate a core dump there (see SERVER-21635).
        # We need to reboot the host for the core limits to take effect.
        ${set_sudo}
        core_pattern=${remote_dir}/dump_%e.%p.core
        sysctl_conf=/etc/sysctl.conf
        cmds="ulimit -a"
        cmds="$cmds; echo \"$USER - core unlimited\" | $sudo tee -a /etc/security/limits.conf"
        # Update kernel.core_pattern in sysctl.conf, appending it if not present.
        cmds="$cmds; if [ -f $sysctl_conf ]"
        cmds="$cmds; then grep ^kernel.core_pattern $sysctl_conf"
        cmds="$cmds; if [ \$? -eq 0 ]"
        cmds="$cmds; then $sudo sed -i \"s,kernel.core_pattern=.*,kernel.core_pattern=$core_pattern,\" $sysctl_conf"
        cmds="$cmds; else echo \"kernel.core_pattern=$core_pattern\" | $sudo tee -a $sysctl_conf"
        cmds="$cmds; fi"
        cmds="$cmds; else echo Cannot change the core pattern and no core dumps will be generated."
        cmds="$cmds; fi"
        # The following line for restarting the machine is based on
        # https://unix.stackexchange.com/a/349558 in order to ensure the ssh client gets a
        # response from the remote machine before it restarts.
        cmds="$cmds; nohup $sudo reboot &>/dev/null & exit"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        if [ "Windows_NT" = "$OS" ]; then
          exit 0
        fi
        # Always exit successfully, as this is just informational.
        trap 'echo "Trapped exit code $?, exiting with 0"; exit 0' EXIT
        # Print the ulimit & kernel.core_pattern
        cmds="uptime"
        cmds="$cmds; ulimit -a"
        cmds="$cmds; if [ -f /sbin/sysctl ]"
        cmds="$cmds; then /sbin/sysctl kernel.core_pattern"
        cmds="$cmds; fi"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        # Retries default to 3 here (not 0): the host may still be rebooting
        # after the core-dump setup step.
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|3} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        ${set_sudo}
        # Set up curator to collect system & process stats on remote.
        if [ "Windows_NT" = "$OS" ]; then
          variant=windows
        else
          variant=ubuntu1604
        fi
        # Download stable version of curator
        curator_hash=117d1a65256ff78b6d15ab79a1c7088443b936d0
        curator_url="https://s3.amazonaws.com/boxes.10gen.com/build/curator/curator-dist-$variant-$curator_hash.tar.gz"
        cmds="curl -s $curator_url | tar -xzv"
        if [ "Windows_NT" = "$OS" ]; then
          # Since curator runs as SYSTEM user, ensure the output files can be accessed.
          cmds="$cmds; touch ${monitor_system_file}; chmod 777 ${monitor_system_file}"
          cmds="$cmds; cygrunsrv --install curator_sys --path curator --chdir \$HOME --args 'stat system --file ${monitor_system_file}'"
          cmds="$cmds; touch ${monitor_proc_file}; chmod 777 ${monitor_proc_file}"
          cmds="$cmds; cygrunsrv --install curator_proc --path curator --chdir \$HOME --args 'stat process-all --file ${monitor_proc_file}'"
          cmds="$cmds; cygrunsrv --start curator_sys"
          cmds="$cmds; cygrunsrv --start curator_proc"
        else
          # On Unix, restart curator via cron on reboot and also start it now.
          cmds="$cmds; cmd=\"@reboot cd \$HOME && $sudo ./curator stat system >> ${monitor_system_file}\""
          cmds="$cmds; (crontab -l ; echo \"\$cmd\") | crontab -"
          cmds="$cmds; cmd=\"@reboot cd \$HOME && $sudo ./curator stat process-all >> ${monitor_proc_file}\""
          cmds="$cmds; (crontab -l ; echo \"\$cmd\") | crontab -"
          cmds="$cmds; crontab -l"
          cmds="$cmds; { $sudo \$HOME/curator stat system --file ${monitor_system_file} > /dev/null 2>&1 & $sudo \$HOME/curator stat process-all --file ${monitor_proc_file} > /dev/null 2>&1 & } & disown"
        fi
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        ${set_sudo}
        # Many systems have the firewall disabled, by default. In case the firewall is
        # enabled we add rules for the mongod ports on the remote.
        # Each branch below detects one firewall tool and builds the matching
        # command string; only one branch's commands are sent to the remote.
        # RHEL 7 firewall rules
        if [ ! -z "$(which firewall-cmd 2> /dev/null)" ]; then
          cmds="$sudo firewall-cmd --permanent --zone=public --add-port=ssh/tcp"
          cmds="$cmds; $sudo firewall-cmd --permanent --zone=public --add-port=${standard_port}/tcp"
          cmds="$cmds; $sudo firewall-cmd --permanent --zone=public --add-port=${secret_port}/tcp"
          cmds="$cmds; $sudo firewall-cmd --reload"
          cmds="$cmds; $sudo firewall-cmd --list-all"
        # ArchLinux, Debian, RHEL 6 firewall rules
        elif [ ! -z "$($sudo iptables --list 2> /dev/null)" ]; then
          cmds="$sudo iptables -I INPUT 1 -p tcp --dport ssh -j ACCEPT"
          cmds="$cmds; $sudo iptables -I INPUT 1 -p tcp --dport ${standard_port} -j ACCEPT"
          cmds="$cmds; $sudo iptables -I INPUT 1 -p tcp --dport ${secret_port} -j ACCEPT"
          if [ -d /etc/iptables ]; then
            rules_file=/etc/iptables/iptables.rules
          elif [ -f /etc/sysconfig/iptables ]; then
            rules_file=/etc/sysconfig/iptables
          else
            rules_file=/etc/iptables.up.rules
          fi
          cmds="$cmds; $sudo iptables-save | $sudo tee $rules_file"
          cmds="$cmds; $sudo iptables --list-rules"
        elif [ ! -z "$($sudo service iptables status 2> /dev/null)" ]; then
          cmds="$sudo iptables -I INPUT 1 -p tcp --dport ssh -j ACCEPT"
          cmds="$cmds; $sudo iptables -I INPUT 1 -p tcp --dport ${standard_port} -j ACCEPT"
          cmds="$cmds; $sudo iptables -I INPUT 1 -p tcp --dport ${secret_port} -j ACCEPT"
          cmds="$cmds; $sudo service iptables save"
          cmds="$cmds; $sudo service iptables status"
        # Ubuntu firewall rules
        elif [ ! -z "$($sudo ufw status 2> /dev/null)" ]; then
          cmds="$sudo ufw allow ssh/tcp"
          cmds="$cmds; $sudo ufw allow ${standard_port}/tcp"
          cmds="$cmds; $sudo ufw allow ${secret_port}/tcp"
          cmds="$cmds; $sudo ufw reload"
          cmds="$cmds; $sudo ufw status"
        # SuSE firewall rules
        # TODO: Add firewall rules using SuSEfirewall2
        elif [ ! -z "$($sudo /sbin/SuSEfirewall2 help 2> /dev/null)" ]; then
          cmds="$sudo /sbin/SuSEfirewall2 stop"
          cmds="$cmds; $sudo /sbin/SuSEfirewall2 off"
        # Windows firewall rules
        elif [ ! -z "$(netsh advfirewall show store 2> /dev/null)" ]; then
          add_rule="netsh advfirewall firewall add rule"
          cmds="$add_rule name='MongoDB port ${standard_port} in' dir=in action=allow protocol=TCP localport=${standard_port}"
          cmds="$cmds; $add_rule name='MongoDB port ${standard_port} out' dir=in action=allow protocol=TCP localport=${standard_port}"
          cmds="$cmds; $add_rule name='MongoDB port ${secret_port} in' dir=in action=allow protocol=TCP localport=${secret_port}"
          cmds="$cmds; $add_rule name='MongoDB port ${secret_port} out' dir=in action=allow protocol=TCP localport=${secret_port}"
          cmds="$cmds; netsh advfirewall firewall show rule name=all | grep -A 13 'MongoDB'"
        else
          echo "Firewall not active or unknown firewall command on this platform"
          exit 0
        fi
        set -o errexit
        if [ ! -z "$cmds" ]; then
          ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
          ${activate_virtualenv}
          $python buildscripts/remote_operations.py \
            --verbose \
            --userHost $USER@${private_ip_address} \
            --sshConnectionOptions "$ssh_connection_options" \
            --retries ${ssh_retries|0} \
            --commands "$cmds"
        fi
  - command: shell.exec
    params:
      shell: bash
      working_dir: src
      script: |
        set -o errexit
        if [[ "Windows_NT" != "$OS" || -z "${windows_crash_zip}" ]]; then
          exit 0
        fi
        # Install NotMyFault, used to crash Windows.
        cmds="curl -s -o ${windows_crash_zip} ${windows_crash_dl}"
        cmds="$cmds; unzip -q ${windows_crash_zip} -d ${windows_crash_dir}"
        cmds="$cmds; chmod +x ${windows_crash_dir}/*.exe"
        ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
        ${activate_virtualenv}
        $python buildscripts/remote_operations.py \
          --verbose \
          --userHost $USER@${private_ip_address} \
          --sshConnectionOptions "$ssh_connection_options" \
          --retries ${ssh_retries|0} \
          --commands "$cmds"
  # Start the background monitor-file copier for the life of the task.
  - *copy_ec2_monitor_files
### Determine & set remote EC2 IP address ###
# Ensures the remote instance is running (starting it if needed), captures its
# console output/screenshot for diagnostics, and writes the current private IP
# to private_ip_address.yml (consumed by "update EC2 address").
"get EC2 address": &get_ec2_address
  command: shell.exec
  params:
    shell: bash
    working_dir: src
    script: |
      if [ -z "${instance_id}" ]; then
        exit 0
      fi
      # Ensure we use the latest private_ip_address, as it could change if the EC2 instance
      # has been stopped and started.
      ${activate_virtualenv}
      # Specify '--mode start' to ensure the remote instance is running.
      now=$(date +'%Y%m%d%H%M%S')
      aws_ec2_status_yml=aws_ec2_status.yml
      $python buildscripts/aws_ec2.py \
        --imageId ${instance_id} \
        --mode start \
        --yamlFile $aws_ec2_status_yml \
        --consoleOutputFile ec2_console_$now.log \
        --consoleScreenshotFile ec2_console_screen_shot_$now.jpg
      private_ip_address=$($python buildscripts/yaml_key_value.py --yamlFile $aws_ec2_status_yml --yamlKey private_ip_address)
      echo "private_ip_address: $private_ip_address" > private_ip_address.yml
# Loads the private_ip_address expansion written by "get EC2 address".
"update EC2 address": &update_ec2_address
  command: expansions.update
  params:
    file: src/private_ip_address.yml
### Process & archive remote EC2 artifacts ###
# Tars the configured artifact paths on the remote host (skipped when there
# are no artifacts or ssh to the instance already failed).
"tar EC2 artifacts": &tar_ec2_artifacts
  command: shell.exec
  params:
    shell: bash
    working_dir: src
    script: |
      # Tar/zip artifacts on remote host.
      if [[ -z "${ec2_artifacts}" || -n "${ec2_ssh_failure}" ]]; then
        exit 0
      fi
      cmd="${tar|tar} czf ec2_artifacts.tgz ${ec2_artifacts}"
      ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
      ${activate_virtualenv}
      $python buildscripts/remote_operations.py \
        --verbose \
        --userHost $USER@${private_ip_address} \
        --sshConnectionOptions "$ssh_connection_options" \
        --retries ${ssh_retries|0} \
        --commands "$cmd"
# Copies the ec2_artifacts.tgz produced by "tar EC2 artifacts" back to this
# host for archival.
"copy EC2 artifacts": &copy_ec2_artifacts
  command: shell.exec
  params:
    shell: bash
    working_dir: src
    script: |
      # Copy remote artifacts.
      if [[ -z "${ec2_artifacts}" || -n "${ec2_ssh_failure}" ]]; then
        exit 0
      fi
      ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
      ${activate_virtualenv}
      $python buildscripts/remote_operations.py \
        --verbose \
        --userHost $USER@${private_ip_address} \
        --operation "copy_from" \
        --sshConnectionOptions "$ssh_connection_options" \
        --retries ${ssh_retries|0} \
        --file ec2_artifacts.tgz
# Terminates the cloned EC2 instance. Intentionally skipped after an
# ec2_ssh_failure so the instance is left alive for diagnosis.
"cleanup EC2 instance": &cleanup_ec2_instance
  command: shell.exec
  params:
    shell: bash
    working_dir: src
    script: |
      # We do not terminate the EC2 instance if there was an ec2_ssh_failure.
      if [[ -z ${instance_id|""} || -n "${ec2_ssh_failure}" ]]; then
        exit 0
      fi
      ${activate_virtualenv}
      # Use the Evergreen expansion ${instance_id}; the shell variable
      # $instance_id is never set in this script, so the previous log line
      # printed an empty id.
      echo "Terminating ${instance_id}"
      aws_ec2=$($python buildscripts/aws_ec2.py --imageId ${instance_id} --mode terminate)
      echo "Terminated AMI EC2 instance: $aws_ec2"
# The event logs on Windows are a useful diagnostic to have when determining if something bad
# happened to the remote machine after it was repeatedly crashed during powercycle testing. For
# example, the Application and System event logs have previously revealed that the mongod.exe
# process abruptly exited due to not being able to open a file despite the process successfully
# being restarted and responding to network requests.
# Dumps the Windows Application/Security/System event logs (latest 10000
# entries each, newest first) to text files on the remote host so they can be
# archived; no-op on non-Windows hosts or after an ssh failure.
"gather remote event logs": &gather_remote_event_logs
  command: shell.exec
  params:
    shell: bash
    working_dir: src
    script: |
      if [[ "Windows_NT" != "$OS" || ! -f ${aws_ec2_yml|""} || -n "${ec2_ssh_failure}" ]]; then
        exit 0
      fi
      cmds="mkdir -p ${event_logpath}"
      cmds="$cmds; wevtutil qe Application /c:10000 /rd:true /f:Text > ${event_logpath}/application.log"
      cmds="$cmds; wevtutil qe Security /c:10000 /rd:true /f:Text > ${event_logpath}/security.log"
      cmds="$cmds; wevtutil qe System /c:10000 /rd:true /f:Text > ${event_logpath}/system.log"
      ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
      ${activate_virtualenv}
      $python buildscripts/remote_operations.py \
        --verbose \
        --userHost $USER@${private_ip_address} \
        --sshConnectionOptions "$ssh_connection_options" \
        --retries ${ssh_retries|0} \
        --commands "$cmds"
"gather remote mongo coredumps": &gather_remote_mongo_coredumps
command: shell.exec
params:
shell: bash
working_dir: "src"
script: |
if [[ ! -f ${aws_ec2_yml|""} || -n "${ec2_ssh_failure}" ]]; then
exit 0
fi
ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
remote_dir=${remote_dir|.}
# Find all core files and move to $remote_dir
cmds="core_files=\$(/usr/bin/find -H . \( -name '*.core' -o -name '*.mdmp' \) 2> /dev/null)"
cmds="$cmds; if [ -z \"\$core_files\" ]; then exit 0; fi"
cmds="$cmds; echo Found remote core files \$core_files, moving to \$(pwd)"
cmds="$cmds; for core_file in \$core_files"
cmds="$cmds; do base_name=\$(echo \$core_file | sed 's/.*\///')"
cmds="$cmds; if [ ! -f \$base_name ]; then mv \$core_file .; fi"
cmds="$cmds; done"
${activate_virtualenv}
$python buildscripts/remote_operations.py \
--verbose \
--userHost $USER@${private_ip_address} \
--sshConnectionOptions "$ssh_connection_options" \
--retries ${ssh_retries} \
--commands "$cmds" \
--commandDir $remote_dir
"copy remote mongo coredumps": &copy_remote_mongo_coredumps
command: shell.exec
params:
shell: bash
working_dir: "src"
script: |
if [[ ! -f ${aws_ec2_yml|""} || -n "${ec2_ssh_failure}" ]]; then
exit 0
fi
ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
remote_dir=${remote_dir|.}
${activate_virtualenv}
$python buildscripts/remote_operations.py \
--verbose \
--userHost $USER@${private_ip_address} \
--operation "copy_from" \
--sshConnectionOptions "$ssh_connection_options" \
--retries ${ssh_retries} \
--file "$remote_dir/*.core" \
--file "$remote_dir/*.mdmp"
# Since both type of core files do not exist on the same host, this command
# will always return non-zero. As the core file retrieval is optional, we
# always exit successfully.
exit 0
"archive remote EC2 artifacts": &archive_remote_ec2_artifacts
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/ec2_artifacts.tgz
remote_file: ${project}/${build_variant}/${revision}/remote_ec2/remote_ec2_artifacts-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Remote EC2 Artifacts - Execution ${execution}
optional: true
"archive remote EC2 monitor files": &archive_remote_ec2_monitor_files
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/ec2_monitor_files.tgz
remote_file: ${project}/${build_variant}/${revision}/remote_ec2/remote_ec2_monitor-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Remote EC2 Monitor - Execution ${execution}
optional: true
"gather EC2 console artifacts": &gather_ec2_console_artifacts
command: shell.exec
params:
working_dir: src
script: |
ec2_console_files=$(ls ec2_console* 2> /dev/null)
if [ -n "$ec2_console_files" ]; then
${tar|tar} czf ec2_console_files.tgz $ec2_console_files
fi
"archive EC2 console artifacts": &archive_ec2_console_artifacts
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/ec2_console_files.tgz
remote_file: ${project}/${build_variant}/${revision}/ec2/ec2_console-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/x-gzip}
display_name: EC2 Console files - Execution ${execution}
optional: true
"save ec2 task artifacts":
- *get_ec2_address
- *update_ec2_address
- *gather_remote_event_logs
- *tar_ec2_artifacts
- *copy_ec2_artifacts
- *gather_remote_mongo_coredumps
- *copy_remote_mongo_coredumps
- *gather_ec2_console_artifacts
- *archive_ec2_console_artifacts
- *cleanup_ec2_instance
- *archive_remote_ec2_artifacts
- *archive_remote_ec2_monitor_files
### Process & archive local client logs ###
# Tars any crud*.log / fsm*.log client logs produced by concurrency/FSM suites.
"tar local client logs": &tar_local_client_logs
command: shell.exec
params:
working_dir: src
script: |
client_logs=$(ls crud*.log fsm*.log 2> /dev/null)
if [ ! -z "$client_logs" ]; then
${tar|tar} czf client-logs.tgz $client_logs
fi
# Uploads the client-log tarball to S3, if one was created.
"archive local client logs": &archive_local_client_logs
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/client-logs.tgz
remote_file: ${project}/${build_variant}/${revision}/client_logs/mongo-client-logs-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Client logs - Execution ${execution}
optional: true
# Composite function: tar then upload the local client logs.
"save local client logs":
- *tar_local_client_logs
- *archive_local_client_logs
### Clear and print OOM messages ###
# Empties the kernel dmesg ring buffer at task start so that any OOM-killer
# messages found by "print OOM messages" in the post phase are from this task.
"clear OOM messages":
command: shell.exec
params:
system_log: true
script: |
ulimit -a
# Clear the dmesg ring buffer. The "post" phase will check dmesg for OOM messages.
${set_sudo}
$sudo dmesg -c > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo "Cleared the dmesg ring buffer"
else
echo "Could not clear the dmesg ring buffer"
fi
"print OOM messages":
# Print out any Out of Memory killed process messages.
command: shell.exec
params:
system_log: true
working_dir: src # Temporary files created in src will be cleaned up in "pre".
script: |
${set_sudo}
# Use dmesg -T option, if supported, to display timestamps.
dmesg=dmesg
$sudo dmesg -T > /dev/null 2>&1
if [ $? -eq 0 ]; then
dmesg="dmesg -T"
fi
$sudo $dmesg 2> /dev/null > dmesg.txt
if [ $? -ne 0 ]; then
echo "Cannot check for OOM (Out of memory) killed processes on this platform"
exit 0
fi
# Match the kernel's OOM-killer log formats, case-insensitively.
grep -iE '(Out of memory|OOM[- ]killer|Killed process)' dmesg.txt > oom.txt
if [ -s oom.txt ]; then
echo "OOM (Out of memory) killed processes detected"
cat oom.txt
else
echo "No OOM (Out of memory) killed processes detected"
fi
### Cleanup after the watchdog FUSE testing ###
# Removes the scratch directories used by charybdefs-based FUSE watchdog tests.
"cleanup FUSE watchdog":
command: shell.exec
params:
working_dir: src
script: |
# 'rm -rf' succeeds silently when a path does not exist, so no explicit
# existence checks are needed; one command covers both directories.
rm -rf /data/thrift /data/charybdefs
### Process & archive Code Coverage artifacts ###
# Converts raw gcov .gcda counter files under ./build into intermediate
# .gcda.gcov files, prefixing each with its '#'-joined directory path so the
# flattened names remain unique.
"process code coverage data": &process_code_coverage_data
command: shell.exec
params:
working_dir: src
script: |
set +o errexit
if [ -d "./build" ]; then
file_list=$(find ./build -type f -name "*.gcda")
if [ -n "$file_list" ]; then
for gcda_file in $file_list; do
echo "Processing file $gcda_file"
/opt/mongodbtoolchain/v3/bin/gcov -i $gcda_file
base_name=$(echo $gcda_file | rev | cut -f1 -d '/' | cut -f2 -d '.' | rev)
gcov_file=$base_name.gcda.gcov
if [ -f "$gcov_file" ]; then
# Add a prefix to the intermediate file, since it does not have a unique name.
# Convert the '/' to '#' in the file path.
file_prefix=$(echo $gcda_file | sed -e 's,^\./,,' | rev | cut -f2- -d '/' | rev | tr -s '/' '#')
new_gcov_file=$file_prefix#$base_name.gcda.gcov
if [ ! -f $new_gcov_file ]; then
echo "Renaming gcov intermediate file $gcov_file to $new_gcov_file"
mv $gcov_file $new_gcov_file
else
# We treat this as a fatal condition and remove all of the coverage files.
echo "Not renaming $gcov_file as $new_gcov_file already exists!"
rm -f *.gcda.gcov
exit 1
fi
fi
rm $gcda_file
done
fi
fi
# Tars all intermediate .gcda.gcov files into a single archive.
"tar code coverage data": &tar_code_coverage_data
command: archive.targz_pack
params:
target: "src/gcov-intermediate-files.tgz"
source_dir: "src"
include:
- "*.gcda.gcov"
# Uploads the coverage tarball to S3, if one was created.
"archive code coverage data": &archive_code_coverage_data
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/gcov-intermediate-files.tgz"
remote_file: ${project}/${build_variant}/${revision}/gcov/gcov-intermediate-files-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: gcov intermediate files - Execution ${execution}
optional: true
# Composite function: process, tar, then upload coverage data.
"save code coverage data":
- *process_code_coverage_data
- *tar_code_coverage_data
- *archive_code_coverage_data
"tar jepsen logs": &tar_jepsen_logs
command: archive.targz_pack
params:
target: "src/jepsen-mongod-logs.tgz"
source_dir: "${workdir}/src/jepsen-workdir"
include:
- "./**.log"
"archive jepsen logs": &archive_jepsen_logs
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/jepsen-mongod-logs.tgz
remote_file: ${project}/${build_variant}/${revision}/jepsen/jepsen-mongod-logs-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Jepsen mongod Logs - ${execution}
optional: true
"tar jepsen results": &tar_jepsen_results
command: archive.targz_pack
params:
target: "src/jepsen-results.tgz"
source_dir: "src/jepsen-mongodb/store"
include:
- "./**"
"archive jepsen results": &archive_jepsen_results
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/jepsen-results.tgz
remote_file: ${project}/${build_variant}/${revision}/jepsen/jepsen-results-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Jepsen Test Results - ${execution}
optional: true
"save jepsen artifacts":
- *tar_jepsen_logs
- *archive_jepsen_logs
- *tar_jepsen_results
- *archive_jepsen_results
### Process & archive mongo coredumps ###
# Moves any core dump files found in sibling directories into src so they are
# picked up by the archive step below.
"gather mongo coredumps": &gather_mongo_coredumps
command: shell.exec
params:
working_dir: "src"
script: |
# Find all core files and move to src
core_files=$(/usr/bin/find -H .. \( -name "*.core" -o -name "*.mdmp" \) 2> /dev/null)
for core_file in $core_files
do
base_name=$(echo $core_file | sed "s/.*\///")
# Move file if it does not already exist
if [ ! -f $base_name ]; then
mv $core_file .
fi
done
# Tars all gathered core dumps (Unix cores and Windows minidumps).
"tar mongo coredumps": &tar_mongo_coredumps
command: archive.targz_pack
params:
target: "mongo-coredumps.tgz"
source_dir: "src"
include:
- "./**.core"
- "./**.mdmp" # Windows: minidumps
# Uploads the coredump tarball to S3, if one was created.
"archive mongo coredumps": &archive_mongo_coredumps
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: mongo-coredumps.tgz
remote_file: ${project}/${build_variant}/${revision}/coredumps/mongo-coredumps-${build_id}-${task_name}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Core Dumps - Execution ${execution}
optional: true
# Composite function: gather, tar, then upload core dumps.
"save mongo coredumps":
- *gather_mongo_coredumps
- *tar_mongo_coredumps
- *archive_mongo_coredumps
### Process & archive failed unittests ###
# For each core dump found, locates the unittest binary that produced it (via
# GDB when available, otherwise by parsing the core file name) and moves the
# binary (and its .pdb on Windows) into unittest_binaries/ for archiving.
"gather failed unittests": &gather_failed_unittests
command: shell.exec
params:
working_dir: "src"
script: |
mkdir unittest_binaries || true
# Find all core files
core_files=$(/usr/bin/find -H . \( -name "dump_*.core" -o -name "*.mdmp" \) 2> /dev/null)
for core_file in $core_files
do
# A core file name does not always have the executable name that generated it.
# See http://stackoverflow.com/questions/34801353/core-dump-filename-gets-thread-name-instead-of-executable-name-with-core-pattern
# On platforms with GDB, we get the binary name from core file
gdb=/opt/mongodbtoolchain/gdb/bin/gdb
if [ -f $gdb ]; then
binary_file=$($gdb -batch --quiet -ex "core $core_file" 2> /dev/null | grep "Core was generated" | cut -f2 -d "\`" | cut -f1 -d "'" | cut -f1 -d " ")
binary_file_locations=$binary_file
else
# Find the base file name from the core file name, note it may be truncated.
# Remove leading 'dump_' and trailing '.<pid>.core' or '.<pid or time>.mdmp'
binary_file=$(echo $core_file | sed "s/.*\///;s/dump_//;s/\..*\.core//;s/\..*\.mdmp//")
# Locate the binary file. Since the base file name might be truncated, the find
# may return more than 1 file.
binary_file_locations=$(/usr/bin/find -H . -name "$binary_file*${exe}" 2> /dev/null)
fi
if [ -z "$binary_file_locations" ]; then
echo "Cannot locate the unittest binary file ($binary_file) that generated the core file $core_file"
fi
for binary_file_location in $binary_file_locations
do
new_binary_file=unittest_binaries/$(echo $binary_file_location | sed "s/.*\///")
if [ ! -f $new_binary_file ]; then
mv $binary_file_location $new_binary_file
fi
# On Windows if a .pdb symbol file exists, include it in the archive.
pdb_file=$(echo $binary_file_location | sed "s/\.exe/.pdb/")
if [ -f $pdb_file ]; then
new_pdb_file=unittest_binaries/$(echo $pdb_file | sed "s/.*\///")
mv $pdb_file $new_pdb_file
fi
done
done
# Tars the gathered *_test binaries.
"tar failed unittests": &tar_failed_unittests
command: archive.targz_pack
params:
target: "mongo-unittests.tgz"
source_dir: "src/unittest_binaries"
include:
- "./*_test${exe}"
# Uploads the failed-unittest tarball to S3, if one was created.
"archive failed unittests": &archive_failed_unittests
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: mongo-unittests.tgz
remote_file: ${project}/${build_variant}/${revision}/unittests/mongo-unittests-${build_id}-${task_name}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Unit tests - Execution ${execution}
optional: true
# Composite function: gather, tar, then upload failed unittest binaries.
"save failed unittests":
- *gather_failed_unittests
- *tar_failed_unittests
- *archive_failed_unittests
"archive dbtest": &archive_dbtest
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: dbtest-binary.tgz
remote_file: ${project}/${build_variant}/${revision}/dbtest/dbtest-${build_id}-${task_name}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: dbtest binary - Execution ${execution}
optional: true
"archive dbtest debugsymbols": &archive_dbtest_debug
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: dbtest-debugsymbols.tgz
remote_file: ${project}/${build_variant}/${revision}/dbtest/dbtest-${build_id}-${task_name}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: dbtest debugsymbols
optional: true
"save unstripped dbtest":
- *archive_dbtest
- *archive_dbtest_debug
### Process & archive artifacts from hung processes ###
# Runs buildscripts/hang_analyzer.py against local (and, when an EC2 host was
# provisioned, remote) mongo-related processes, then copies the remote debugger
# output and core files back to this host.
"run hang analyzer":
command: shell.exec
params:
working_dir: src
script: |
set -o verbose
hang_analyzer_option="-o file -o stdout -p ${hang_analyzer_processes|dbtest,java,mongo,mongod,mongos,python,_test}"
# -c (dump cores) is skipped on ASAN builds.
if [ ${is_asan_build|false} = false ]; then
hang_analyzer_option="-c $hang_analyzer_option"
fi
${activate_virtualenv}
echo "Calling the hang analyzer: PATH=\"/opt/mongodbtoolchain/gdb/bin:$PATH\" $python buildscripts/hang_analyzer.py $hang_analyzer_option"
PATH="/opt/mongodbtoolchain/gdb/bin:$PATH" $python buildscripts/hang_analyzer.py $hang_analyzer_option
# Call hang_analyzer.py script for tasks that are running remote mongo processes
if [ -n "${private_ip_address}" ]; then
core_ext=core
if [ "Windows_NT" = "$OS" ]; then
core_ext=mdmp
fi
ssh_connection_options="${ssh_identity} ${ssh_connection_options}"
# buildscripts must be installed in ${remote_dir} on the remote host.
remote_dir=${remote_dir|.}
# Copy mongoDB debug symbols to the remote host.
debug_files=$(ls *.debug *.dSYM *.pdb 2> /dev/null)
for debug_file in $debug_files
do
file_param="$file_param --file $debug_file"
done
if [ ! -z "$file_param" ]; then
$python buildscripts/remote_operations.py \
--verbose \
--userHost $USER@${private_ip_address} \
--operation "copy_to" \
--sshConnectionOptions "$ssh_connection_options" \
--retries ${ssh_retries|0} \
$file_param \
--remoteDir $remote_dir
fi
# Activate virtualenv on remote host. The virtualenv bin_dir is different for Linux and
# Windows.
bin_dir=$(find $VIRTUAL_ENV -name activate | sed -e "s,$VIRTUAL_ENV,,;s,activate,,;s,/,,g")
cmds=". ${virtualenv_dir|venv}/$bin_dir/activate"
# In the 'cmds' variable we pass to remote host, use 'python' instead of '$python' since
# we don't want to evaluate the local python variable, but instead pass the python string
# so the remote host will use the right python when the virtualenv is sourced.
cmds="$cmds; cd ${remote_dir}"
cmds="$cmds; PATH=\"/opt/mongodbtoolchain/gdb/bin:\$PATH\" python buildscripts/hang_analyzer.py $hang_analyzer_option"
$python buildscripts/remote_operations.py \
--verbose \
--userHost $USER@${private_ip_address} \
--sshConnectionOptions "$ssh_connection_options" \
--retries ${ssh_retries|0} \
--commands "$cmds"
$python buildscripts/remote_operations.py \
--verbose \
--userHost $USER@${private_ip_address} \
--operation "copy_from" \
--sshConnectionOptions "$ssh_connection_options" \
--retries ${ssh_retries|0} \
--file "$remote_dir/debugger*.*" \
--file "$remote_dir/*.$core_ext"
fi
"tar hang analyzer debugger files": &tar_hang_analyzer_debugger_files
command: archive.targz_pack
params:
target: "src/mongo-hanganalyzer.tgz"
source_dir: "src"
include:
- "./debugger*.*"
"archive hang analyzer debugger files": &archive_hang_analyzer_debugger_files
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mongo-hanganalyzer.tgz
remote_file: ${project}/${build_variant}/${revision}/hanganalyzer/mongo-hanganalyzer-${build_id}-${task_name}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Hang Analyzer Output - Execution ${execution}
optional: true
"save hang analyzer debugger files":
- *tar_hang_analyzer_debugger_files
- *archive_hang_analyzer_debugger_files
### Process & archive disk statistic artifacts ###
# Tars the mongo-diskstats* files recorded by the "run diskstats" function.
"tar disk statistics": &tar_disk_statistics
command: archive.targz_pack
params:
target: "diskstats.tgz"
source_dir: "./"
include:
- "./mongo-diskstats*"
- "./mongo-diskstats*.csv"
# Uploads the diskstats tarball to S3, if one was created.
"archive disk statistics": &archive_disk_statistics
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: diskstats.tgz
remote_file: ${project}/${build_variant}/${revision}/diskstats/mongo-diskstats-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Disk Stats - Execution ${execution}
optional: true
# Composite function: tar then upload disk statistics.
"save disk statistics":
- *tar_disk_statistics
- *archive_disk_statistics
### Process & archive system resource artifacts ###
# Tars the system_resource_info* files recorded by "collect system resource info".
"tar system resource information": &tar_system_resource_information
command: archive.targz_pack
params:
target: "system-resource-info.tgz"
source_dir: src
include:
- "./system_resource_info*"
# Uploads the system-resource tarball to S3, if one was created.
"archive system resource information": &archive_system_resource_information
command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: system-resource-info.tgz
remote_file: ${project}/${build_variant}/${revision}/systemresourceinfo/mongo-system-resource-info-${task_id}-${execution}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: System Resource Info - Execution ${execution}
optional: true
# Composite function: tar then upload system resource information.
"save system resource information":
- *tar_system_resource_information
- *archive_system_resource_information
### Attach report & artifacts ###
# Uploads the SCons configure log so build failures can be diagnosed.
"attach scons config log":
command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/build/scons/config.log
remote_file: ${project}/${build_variant}/${revision}/artifacts/config-${build_id}.log
bucket: mciuploads
permissions: public-read
content_type: text/plain
display_name: config.log
# Attaches the resmoke test-result report to the Evergreen task page.
"attach report":
command: attach.results
params:
file_location: ${report_file|src/report.json}
# Attaches any artifact links listed in archive.json to the task page.
"attach artifacts":
command: attach.artifacts
params:
optional: true
ignore_artifacts_for_spawn: false
files:
- ${archive_file|src/archive.json}
# Generates and attaches a link to the "running tests locally" wiki page.
"attach wiki page":
- command: shell.exec
params:
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
$python -c 'import json; print(json.dumps([{
"name": "Wiki: Running Tests from Evergreen Tasks Locally",
"link": "https://github.com/mongodb/mongo/wiki/Running-Tests-from-Evergreen-Tasks-Locally",
"visibility": "public",
"ignore_for_fetch": True
}]))' > wiki_page_location.json
- command: attach.artifacts
params:
files:
- wiki_page_location.json
# Pre task steps
# Run before every task: kill leftover processes, reset the environment, and
# clear dmesg so post-task OOM detection only sees this task's messages.
pre:
- func: "kill processes"
- func: "cleanup environment"
- func: "set task expansion macros"
- func: "clear OOM messages"
# Post task steps
# Run after every task. Ordering matters: reports/artifacts are attached first,
# EC2 diagnostics are saved (and the instance terminated) before local
# processes are killed and the various diagnostic archives are uploaded.
post:
- func: "attach report"
- func: "attach artifacts"
- func: "save ec2 task artifacts"
- func: "call BF Suggestion service"
- func: "attach wiki page"
- func: "kill processes"
- func: "save local client logs"
- func: "save code coverage data"
- func: "save jepsen artifacts"
- func: "save mongo coredumps"
- func: "save failed unittests"
- func: "save hang analyzer debugger files"
- func: "save disk statistics"
- func: "save system resource information"
- func: "print OOM messages"
- func: "umount shared scons directory"
- func: "cleanup FUSE watchdog"
- func: "cleanup environment"
# Timeout steps
# Run when a task times out: refresh the EC2 address, then dump stacks of any
# hung processes via the hang analyzer.
timeout:
- func: "get EC2 address"
- func: "update EC2 address"
- func: "run hang analyzer"
#######################################
# Tasks #
#######################################
tasks:
## compile - build all scons targets except unittests ##
- name: compile
depends_on: []
commands:
# Build the distribution test archives and source tarball.
- func: "scons compile"
vars:
targets: >-
archive-dist-test
archive-dist-test-debug
distsrc-${ext|tgz}
${additional_targets|}
${mh_target|}
task_compile_flags: >-
--detect-odr-violations
--separate-debug
PREFIX=dist-test
# Tar unstripped dbtest, to be archived in case of failure
- command: archive.targz_pack
params:
target: "dbtest_unstripped.tgz"
source_dir: "src"
include:
- "./dbtest*"
# If mongocryptd was built, verify that this variant is in the known
# enterprise-variant list for mongocryptd.
- command: shell.exec
params:
working_dir: src
script: |
set -o errexit
set -o verbose
if [ $(find . -name mongocryptd${exe} | wc -l) -eq 1 ] ; then
# Validate that this build_variant is listed as a known enterprise task for mongocryptd
PATH=$PATH:$HOME $python ../buildscripts/validate_mongocryptd.py --variant "${build_variant}" ../etc/evergreen.yml
fi
# Package everything downstream test tasks need (scripts, jstests, configs).
- command: archive.targz_pack
params:
target: "artifacts.tgz"
source_dir: "src"
include:
- "./build/**.gcno"
- "./etc/*san.suppressions"
- "./etc/pip/**"
- "./etc/repo_config.yaml"
- "./etc/scons/**"
- "artifacts.json"
- "buildscripts/**"
- "bypass_compile_expansions.yml"
- "compile_expansions.yml"
- "jstests/**"
- "library_dependency_graph.json"
- "patch_files.txt"
- "pytests/**"
- "src/mongo/client/sdam/json_tests/**"
- "src/mongo/db/modules/enterprise/docs/**"
- "src/mongo/db/modules/enterprise/jstests/**"
- "src/mongo/db/modules/subscription/jstests/**"
- "src/mongo/util/options_parser/test_config_files/**"
- "src/third_party/JSON-Schema-Test-Suite/tests/draft4/**"
- "src/third_party/mock_ocsp_responder/**"
exclude_files:
- "*_test.pdb"
- func: "upload debugsymbols"
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mongodb-binaries.${ext|tgz}
remote_file: ${mongo_binaries}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Binaries
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mongodb-cryptd.${ext|tgz}
remote_file: ${mongo_cryptd}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: CryptD Binaries
# NOTE(review): this step uploads the same local file as the "CryptD Binaries"
# step above but to the debugsymbols remote path — presumably the local file
# should be a separate cryptd debugsymbols tarball; confirm against the build
# targets that produce these archives.
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mongodb-cryptd.${ext|tgz}
remote_file: ${mongo_cryptd_debugsymbols}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: CryptD Debugsymbols
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mh-binaries.${ext|tgz}
remote_file: ${mh_archive}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: MH Binaries
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/mh-debugsymbols.${ext|tgz}
remote_file: ${mh_debugsymbols}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: MH Debuginfo
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: artifacts.tgz
remote_file: ${project}/${build_variant}/${revision}/artifacts/${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: Artifacts
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/distsrc.${ext|tgz}
remote_file: ${project}/${build_variant}/${revision}/sources/mongo-src-${build_id}.${ext|tgz}
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Source tarball
# We only need to upload the source tarball from one of the build variants
# because it should be the same everywhere, so just use rhel70/windows.
build_variants: [rhel70, windows]
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/scons_cache.log
content_type: text/plain
remote_file: ${project}/${build_variant}/${revision}/scons-cache-${build_id}-${execution}.log
bucket: mciuploads
permissions: public-read
display_name: SCons cache debug log
# For patch builds that bypass compile, we upload links to pre-existing tarballs, except for the
# artifacts.tgz.
- command: attach.artifacts
params:
optional: true
ignore_artifacts_for_spawn: false
files:
- src/artifacts.json
## compile_core_tools - minimal version of compile used for commit queue ##
- name: compile_core_tools
depends_on: []
commands:
- func: "scons compile"
vars:
# Only the core server, tools, and dist archive — keeps commit-queue fast.
targets: install-core install-tools archive-dist ${mh_target|}
compiling_for_test: true
additional_targets: ""
task_compile_flags: >-
--detect-odr-violations
--separate-debug
# Generates a Ninja build file via SCons, then builds with ninja to exercise
# the Ninja generator.
- name: compile_ninja
commands:
- func: "scons compile"
vars:
task_compile_flags: >-
--ninja=next
targets:
build.ninja
- command: shell.exec
params:
working_dir: src
shell: bash
script: ninja -f build.ninja
## compile_all - build all scons targets ##
- name: compile_all
commands:
- func: "scons compile"
vars:
targets: install-all
compiling_for_test: true
task_compile_flags: >-
--detect-odr-violations
--separate-debug
# Upload the library dependency graph produced by the build (Dagger analysis).
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/library_dependency_graph.json
remote_file: ${project}/${build_variant}/${revision}/library_dependency_graph.${build_id}.json
bucket: mciuploads
permissions: public-read
content_type: application/json
display_name: Library Dependency Graph (library_dependency_graph.json)
build_variants: [enterprise-rhel-70-64-bit-kitchen-sink] # This must be the Dagger variant
## clang_tidy - run clang_tidy
- name: clang_tidy
exec_timeout_secs: 14400
commands:
# Generate sources and the compilation database clang-tidy needs.
- func: "scons compile"
vars:
targets: generated-sources compiledb
task_compile_flags: >-
--detect-odr-violations
compiling_for_test: true
- command: shell.exec
type: test
timeout_secs: 14400
params:
working_dir: "src"
script: |
set -o errexit
set -o verbose
# This could probably be accelerated with gnu parallel
/opt/mongodbtoolchain/v3/bin/clang-tidy -p ./compile_commands.json --checks="-*,bugprone-unused-raii" -warnings-as-errors="*" $(jq -r '.[] | .file' compile_commands.json | grep src/mongo)
## unittests - run unittests ##
- name: unittests
commands:
- func: "scons compile"
vars:
targets: install-unittests
task_compile_flags: >-
--detect-odr-violations
--separate-debug
compiling_for_test: true
# Diagnostics are started before the tests so failures come with disk,
# thread, and system-resource data.
- func: "run diskstats"
- func: "monitor process threads"
- func: "collect system resource info"
- func: "run tests"
vars:
resmoke_args: --suites=unittests
##compile_libfuzzertests - build libfuzzertests ##
- name: compile_libfuzzertests
commands:
- func: "scons compile"
vars:
targets: archive-fuzzertests
task_compile_flags: >-
--detect-odr-violations
compiling_for_test: true
# Upload the fuzzer archive for the libfuzzertests run task to download.
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/fuzzertests-runtime.tgz"
remote_file: "${project}/libfuzzer-tests/${build_variant}/${revision}/libfuzzer-tests.tgz"
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: "LibFuzzer Tests"
## libfuzzertests - run libfuzzertests ##
- name: libfuzzertests
commands:
# Seed the fuzzers with the previously saved corpus before running.
- func: "fetch corpus"
- func: "extract corpus"
- func: "run tests"
vars:
resmoke_args: --suites=libfuzzer
# Builds and runs the SDAM (server discovery and monitoring) JSON spec tests.
- name: server_discovery_and_monitoring_json_test
commands:
- func: "scons compile"
vars:
targets: "install-sdam-json-test"
task_compile_flags: >-
--detect-odr-violations
compiling_for_test: true
- func: "run tests"
vars:
resmoke_args: --suites=sdam_json_test
## dbtest ##
- name: dbtest
commands:
- func: "scons compile"
vars:
targets: archive-dbtest archive-dbtest-debug
task_compile_flags: >-
--detect-odr-violations
--separate-debug
compiling_for_test: true
# Diagnostics are started before the tests so failures come with disk,
# thread, and system-resource data.
- func: "run diskstats"
- func: "monitor process threads"
- func: "collect system resource info"
- func: "run tests"
vars:
resmoke_args: --suites=dbtest --storageEngine=wiredTiger
install_dir: build/install/bin
## embedded_sdk_build_and_test_* - build the embedded-dev and embedded-test targets only ##
- name: embedded_sdk_build_cdriver
commands:
- command: shell.exec
params:
script: |
set -o errexit
set -o verbose
VERSION=${version}
WORKDIR=${workdir}
# build in a different directory then we run tests so that we can verify that the linking
# of tests are not relying any built in absolute paths
FINAL_PREFIX=$WORKDIR/src/build/mongo-embedded-sdk-$VERSION
BUILD_PREFIX=$FINAL_PREFIX-tmp
rm -rf mongo-c-driver
# NOTE: If you change the C Driver version here, also change the substitution in the CocoaPod podspec below in the apple builder.
git clone --branch r1.13 --depth 1 https://github.com/mongodb/mongo-c-driver.git
cd mongo-c-driver
# Fixup VERSION so we don't end up with -dev on it. Remove this once we are building a stable version and CDRIVER-2861 is resolved.
cp -f VERSION_RELEASED VERSION_CURRENT
trap "cat CMakeFiles/CMakeOutput.log" EXIT
export ${compile_env|}
${cmake_path|/opt/cmake/bin/cmake} -DCMAKE_INSTALL_PREFIX=$BUILD_PREFIX -DENABLE_SHM_COUNTERS=OFF -DENABLE_SNAPPY=OFF -DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF -DENABLE_TESTS=OFF -DENABLE_EXAMPLES=OFF -DENABLE_STATIC=OFF -DCMAKE_OSX_DEPLOYMENT_TARGET=${cdriver_cmake_osx_deployment_target|} ${cdriver_cmake_flags}
trap - EXIT # cancel the previous trap '...' EXIT
make install VERBOSE=1
# TODO: Remove this when we upgrade to a version of the C driver that has CDRIVER-2854 fixed.
mkdir -p $BUILD_PREFIX/share/doc/mongo-c-driver
cp COPYING $BUILD_PREFIX/share/doc/mongo-c-driver
cp THIRD_PARTY_NOTICES $BUILD_PREFIX/share/doc/mongo-c-driver
if [ -d $BUILD_PREFIX/Frameworks ]; then
# We need to account for the fact that on the Darwin
# mobile platforms, things shouldn't go to the Resources
# directory but on macOS they should. If the Resources
# directory is there (it should be for the plist file),
# then use it, otherwise just use the framework root.
if [ -e $BUILD_PREFIX/Frameworks/mongoc.framework/Resources ]; then
cp COPYING $BUILD_PREFIX/Frameworks/mongoc.framework/Resources
cp THIRD_PARTY_NOTICES $BUILD_PREFIX/Frameworks/mongoc.framework/Resources
plutil -insert MinimumOSVersion -string ${cdriver_cmake_osx_deployment_target} $BUILD_PREFIX/Frameworks/mongoc.framework/Resources/Info.plist
else
cp COPYING $BUILD_PREFIX/Frameworks/mongoc.framework
cp THIRD_PARTY_NOTICES $BUILD_PREFIX/Frameworks/mongoc.framework
plutil -insert MinimumOSVersion -string ${cdriver_cmake_osx_deployment_target} $BUILD_PREFIX/Frameworks/mongoc.framework/Info.plist
fi
if [ -e $BUILD_PREFIX/Frameworks/bson.framework/Resources ]; then
cp COPYING $BUILD_PREFIX/Frameworks/bson.framework/Resources
cp THIRD_PARTY_NOTICES $BUILD_PREFIX/Frameworks/bson.framework/Resources
plutil -insert MinimumOSVersion -string ${cdriver_cmake_osx_deployment_target} $BUILD_PREFIX/Frameworks/bson.framework/Resources/Info.plist
else
cp COPYING $BUILD_PREFIX/Frameworks/bson.framework
cp THIRD_PARTY_NOTICES $BUILD_PREFIX/Frameworks/bson.framework
plutil -insert MinimumOSVersion -string ${cdriver_cmake_osx_deployment_target} $BUILD_PREFIX/Frameworks/bson.framework/Info.plist
fi
fi
# CMake doesn't seem to do the dSYM for us for the framework
if [ -e $BUILD_PREFIX/Frameworks ]; then
pushd $BUILD_PREFIX/Frameworks
xcrun dsymutil -num-threads=1 -o bson.framework.dSYM bson.framework/bson
xcrun strip -Sx bson.framework/bson
xcrun dsymutil -num-threads=1 -o mongoc.framework.dSYM mongoc.framework/mongoc
xcrun strip -Sx mongoc.framework/mongoc
popd
fi
mv $BUILD_PREFIX $FINAL_PREFIX
- name: embedded_sdk_install_dev
commands:
- func: "scons compile"
vars:
targets: install-embedded-dev
task_compile_flags: &embedded_sdk_compile_flags >-
--allocator=system
--dbg=off
--enable-free-mon=off
--enable-http-client=off
--js-engine=none
--opt=size
--separate-debug
--ssl=off
--use-system-mongo-c=on
DESTDIR='$BUILD_ROOT/mongo-embedded-sdk-$MONGO_VERSION'
CPPPATH='$BUILD_ROOT/mongo-embedded-sdk-$MONGO_VERSION/include/libbson-1.0 $BUILD_ROOT/mongo-embedded-sdk-$MONGO_VERSION/include/libmongoc-1.0'
task_compile_flags_extra: >-
--link-model=dynamic-sdk
- name: embedded_sdk_s3_put
commands:
# Not using archive.targz_pack here because I can't get it to work.
- command: shell.exec
params:
working_dir: "src/build"
script: |
set -o errexit
set -o verbose
cat <<EOF > mongo-embedded-sdk-${version}/README-Licenses.txt
The software accompanying this file is Copyright (C) 2018 MongoDB, Inc. and
is licensed to you on the terms set forth in the following files:
- mongo-c-driver: share/doc/mongo-c-driver/COPYING
- mongo_embedded: share/doc/mongo_embedded/LICENSE-Embedded.txt
- mongoc_embedded: share/doc/mongo_embedded/LICENSE-Embedded.txt
EOF
tar cfvz embedded-sdk.tgz mongo-embedded-sdk-${version}
# Upload it so we can download from EVG.
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/build/embedded-sdk.tgz"
remote_file: ${project}/embedded-sdk/${build_variant}/${revision}/mongo-embedded-sdk-${version}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: "Embedded SDK Tar Archive"
- name: embedded_sdk_install_tests
commands:
- func: "scons compile"
vars:
targets: install-embedded-test
compiling_for_test: true
task_compile_flags: *embedded_sdk_compile_flags
task_compile_flags_extra: >-
--link-model=dynamic
- name: embedded_sdk_tests_s3_put
commands:
# Not using archive.targz_pack here because I can't get it to work.
- command: shell.exec
params:
working_dir: "src/build"
script: |
set -o errexit
set -o verbose
tar cfvz embedded-sdk-tests.tgz mongo-embedded-sdk-${version}
# Upload it so we can download from EVG.
- command: s3.put
params:
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/build/embedded-sdk-tests.tgz"
remote_file: ${project}/embedded-sdk-test/${build_variant}/${revision}/mongo-embedded-sdk-test-${version}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: "Embedded SDK Tests Tar Archive"
- name: embedded_sdk_run_tests
  commands:
    # Run the embedded SDK smoke tests out of the installed (relocated) SDK tree
    # to prove the binaries work from their final prefix.
    - command: shell.exec
      type: test
      params:
        working_dir: src
        script: |
          set -o verbose
          set -o errexit

          ${activate_virtualenv}
          "build/mongo-embedded-sdk-${version}/bin/mongo_embedded_test"
          "build/mongo-embedded-sdk-${version}/bin/mongoc_embedded_test"

    # If this is a patch build, blow away the file so our subsequent and optional s3.put
    # doesn't run. That way, we won't overwrite the latest part in our patches.
    - command: shell.exec
      params:
        working_dir: "src/build"
        script: |
          set -o errexit
          set -o verbose

          if [ "${is_patch}" = "true" ]; then
            # The tarball lives directly in the working directory ("src/build"),
            # so the path must be relative to it. The previous
            # "rm -f src/build/embedded-sdk.tgz" resolved to
            # src/build/src/build/embedded-sdk.tgz, removed nothing (silently,
            # due to -f), and let the -latest upload overwrite the artifact in
            # patch builds.
            rm -f embedded-sdk.tgz
            # NOTE(review): embedded-sdk-tests.tgz is also uploaded to -latest
            # by embedded_sdk_tests_s3_put_latest (optional: true) — confirm
            # whether it should be removed here as well.
          fi
- name: embedded_sdk_s3_put_latest
commands:
# A second put, this time to -latest, to give devs a reasonable
# way to get the most recent build.
- command: s3.put
params:
visibility: none
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/build/embedded-sdk.tgz"
remote_file: ${project}/embedded-sdk/mongo-${build_variant}-latest.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/x-gzip}
- name: embedded_sdk_tests_s3_put_latest
commands:
# A second put, this time to -latest, to give devs a reasonable
# way to get the most recent build.
- command: s3.put
params:
visibility: none
optional: true
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/build/embedded-sdk-tests.tgz"
remote_file: ${project}/embedded-sdk-test/mongo-${build_variant}-latest.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/x-gzip}
- name: stitch_support_create_lib
commands:
- func: "scons compile"
vars:
targets: install-stitch-support install-stitch-support-debug install-stitch-support-dev
task_compile_flags: >-
--dbg=off
--link-model=dynamic-sdk
--enable-free-mon=off
--ssl=off
--enable-http-client=off
--modules=
--separate-debug
DESTDIR='$BUILD_ROOT/stitch-support-lib-$MONGO_VERSION'
- command: shell.exec
params:
working_dir: "src/build"
script: |
set -o errexit
set -o verbose
tar cfvz stitch-support.tgz stitch-support-lib-${version}
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: "src/build/stitch-support.tgz"
remote_file: "${project}/stitch-support/${build_variant}/${revision}/stitch-support-${version}.tgz"
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: "Stitch Support Library"
- name: stitch_support_install_tests
commands:
- func: "scons compile"
vars:
targets: install-stitch-support-test
compiling_for_test: true
task_compile_flags: >-
--dbg=off
--enable-free-mon=off
--ssl=off
--enable-http-client=off
--modules=
--separate-debug
DESTDIR='$BUILD_ROOT/stitch-support-lib-$MONGO_VERSION'
- name: stitch_support_run_tests
commands:
- command: shell.exec
type: test
params:
working_dir: src
script: |
set -o errexit
set -o verbose
"build/stitch-support-lib-${version}/bin/stitch_support_test"
- name: compile_benchmarks
depends_on: []
commands:
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "get buildnumber"
- func: "set up credentials"
- func: "use WiredTiger develop" # noop if ${use_wt_develop} is not "true"
- func: "set up win mount script"
- func: "generate compile expansions"
# Then we load the generated version data into the agent so we can use it in task definitions
- func: "apply compile expansions"
- func: "scons compile"
vars:
targets: install-benchmarks
compiling_for_test: true
- command: archive.targz_pack
params:
target: "benchmarks.tgz"
source_dir: "src"
include:
- "./build/benchmarks.txt"
- "./build/**_bm"
- "./build/**_bm.gcno"
- "./build/**_bm.exe"
- "./build/**_bm.pdb"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: benchmarks.tgz
remote_file: ${project}/${build_variant}/${revision}/benchmarks/${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: application/tar
display_name: Benchmarks
## lint ##
- name: lint_pylinters
commands:
- command: timeout.update
params:
# 40 minutes
exec_timeout_secs: 2400
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "scons lint"
vars:
targets: lint-pylinters
- name: lint_clang_format
commands:
- command: timeout.update
params:
# 40 minutes
exec_timeout_secs: 2400
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "scons lint"
vars:
targets: lint-clang-format
- name: lint_eslint
commands:
- command: timeout.update
params:
# 40 minutes
exec_timeout_secs: 2400
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "scons lint"
vars:
targets: lint-eslint
- name: lint_cpplint
commands:
- command: timeout.update
params:
# 40 minutes
exec_timeout_secs: 2400
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "scons lint"
vars:
targets: lint-lint.py
- name: lint_yaml
depends_on: []
commands:
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- command: shell.exec
type: test
params:
working_dir: src
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
find buildscripts etc jstests -name '*.y*ml' -exec yamllint -c etc/yamllint_config.yml {} +
- name: lint_errorcodes
commands:
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "scons lint"
vars:
targets: lint-errorcodes
- name: burn_in_tests_gen
commands:
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "configure evergreen api credentials"
- command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
# Capture a list of new and modified tests. The expansion macro burn_in_tests_build_variant
# is used for finding the associated tasks from a different build variant than the
# burn_in_tests_gen task executes on.
build_variant_opts="--build-variant=${build_variant}"
if [ -n "${burn_in_tests_build_variant|}" ]; then
build_variant_opts="--build-variant=${burn_in_tests_build_variant} --run-build-variant=${build_variant}"
fi
burn_in_args="$burn_in_args --repeat-tests-min=2 --repeat-tests-max=1000 --repeat-tests-secs=600"
# Evergreen executable is in $HOME.
PATH=$PATH:$HOME $python buildscripts/burn_in_tests.py --project=${project} $build_variant_opts --distro=${distro_id} --generate-tasks-file=burn_in_tests_gen.json $burn_in_args --verbose
- command: archive.targz_pack
params:
target: src/burn_in_tests_gen.tgz
source_dir: src
include:
- burn_in_tests_gen.json
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/burn_in_tests_gen.tgz
remote_file: ${project}/${build_variant}/${revision}/burn_in_tests_gen/burn_in_tests_gen-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Burn_in_tests Task Config - Execution ${execution}
- command: generate.tasks
params:
files:
- src/burn_in_tests_gen.json
- name: burn_in_tests_multiversion_gen
commands:
- command: manifest.load
- func: "git get project"
- func: "set task expansion macros"
- func: "set up virtualenv"
- func: "upload pip requirements"
- func: "configure evergreen api credentials"
- func: "do multiversion setup"
- command: shell.exec
type: test
params:
working_dir: src
shell: bash
script: |
set -o errexit
set -o verbose
${activate_virtualenv}
# Capture a list of new and modified tests. The expansion macro burn_in_tests_build_variant
# is used for finding the associated tasks from a different build variant than the
# burn_in_tests_multiversion_gen task executes on.
build_variant_opts="--build-variant=${build_variant}"
if [ -n "${burn_in_tests_build_variant|}" ]; then
build_variant_opts="--build-variant=${burn_in_tests_build_variant} --run-build-variant=${build_variant}"
fi
burn_in_args="$burn_in_args"
# Evergreen executable is in $HOME.
PATH=$PATH:$HOME $python buildscripts/burn_in_tests.py --task_id=${task_id} --project=${project} $build_variant_opts --distro=${distro_id} --generate-tasks-file=burn_in_tests_multiversion_gen.json $burn_in_args --use-multiversion --verbose
- command: archive.targz_pack
params:
target: src/burn_in_tests_multiversion_gen.tgz
source_dir: src
include:
- burn_in_tests_multiversion_gen.json
- command: archive.targz_pack
params:
target: generate_tasks_config.tgz
source_dir: src/generated_resmoke_config
include:
- "*"
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: generate_tasks_config.tgz
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/burn_in_tests_multiversion_gen-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
optional: true
display_name: Generated Multiversion Resmoke.py Suite Config - Execution ${execution}
- command: s3.put
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
local_file: src/burn_in_tests_multiversion_gen.tgz
remote_file: ${project}/${build_variant}/${revision}/generate_tasks/burn_in_tests_multiversion_gen_config-${build_id}.tgz
bucket: mciuploads
permissions: public-read
content_type: ${content_type|application/gzip}
display_name: Burn_in_tests Task Config - Execution ${execution}
- command: generate.tasks
params:
files:
- src/burn_in_tests_multiversion_gen.json
- <<: *benchmark_template
name: benchmarks_orphaned
tags: ["benchmarks"]
commands:
- func: "do benchmark setup"
- func: "run tests"
vars:
resmoke_args: --suites=benchmarks
resmoke_jobs_max: 1
- func: "send benchmark results"
- <<: *benchmark_template
name: benchmarks_sharding
tags: ["benchmarks"]
commands:
- func: "do benchmark setup"
- func: "run tests"
vars:
resmoke_args: --suites=benchmarks_sharding
resmoke_jobs_max: 1
- func: "send benchmark results"
- <<: *run_jepsen_template
name: jepsen_register_findAndModify
tags: ["jepsen"]
commands:
- func: "do setup"
- func: "do jepsen setup"
- func: "run jepsen test"
vars:
<<: *jepsen_config_vars
jepsen_read_with_find_and_modify: --read-with-find-and-modify
jepsen_storage_engine: --storage-engine wiredTiger
jepsen_test_name: register
- <<: *run_jepsen_template
name: jepsen_register_linearizableRead
tags: ["jepsen"]
commands:
- func: "do setup"
- func: "do jepsen setup"
- func: "run jepsen test"
vars:
<<: *jepsen_config_vars
jepsen_read_concern: --read-concern linearizable
jepsen_storage_engine: --storage-engine wiredTiger
jepsen_test_name: register
- <<: *run_jepsen_template
name: jepsen_set_linearizableRead
tags: ["jepsen"]
commands:
- func: "do setup"
- func: "do jepsen setup"
- func: "run jepsen test"
vars:
<<: *jepsen_config_vars
jepsen_read_concern: --read-concern linearizable
jepsen_storage_engine: --storage-engine wiredTiger
jepsen_test_name: set
- <<: *run_jepsen_template
name: jepsen_read-concern-majority
tags: ["jepsen"]
commands:
- func: "do setup"
- func: "do jepsen setup"
- func: "run jepsen test"
vars:
<<: *jepsen_config_vars
jepsen_storage_engine: --storage-engine wiredTiger
jepsen_test_name: read-concern-majority
- <<: *run_jepsen_template
name: jepsen_read-concern-majority_w1
tags: ["jepsen"]
commands:
- func: "do setup"
- func: "do jepsen setup"
- func: "run jepsen test"
vars:
<<: *jepsen_config_vars
jepsen_storage_engine: --storage-engine wiredTiger
jepsen_test_name: read-concern-majority
jepsen_write_concern: --write-concern w1
## initial sync multiversion fuzzer ##
- <<: *jstestfuzz_template
name: initial_sync_multiversion_fuzzer_gen
tags: ["multiversion_fuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 20
num_tasks: 1
npm_command: initsync-fuzzer
suite: initial_sync_multiversion_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: initial_sync_multiversion_fuzzer
task_path_suffix: "/data/multiversion"
## initial sync generational fuzzer ##
- <<: *jstestfuzz_template
name: initial_sync_fuzzer_gen
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 20
num_tasks: 1
npm_command: initsync-fuzzer
suite: initial_sync_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: initial_sync_fuzzer
## Standalone generational fuzzer for multiversion aggregation pipelines ##
- <<: *jstestfuzz_template
name: aggregation_multiversion_fuzzer_gen
tags: ["aggfuzzer", "common", "multiversion"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
npm_command: agg-fuzzer
name: aggregation_multiversion_fuzzer
task_path_suffix: "/data/multiversion"
## Standalone generational fuzzer for multiversion aggregation expressions ##
- <<: *jstestfuzz_template
name: aggregation_expression_multiversion_fuzzer_gen
tags: ["aggfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 15
num_tasks: 1
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
npm_command: agg-expr-fuzzer
name: aggregation_expression_multiversion_fuzzer
task_path_suffix: "/data/multiversion"
## Standalone generational fuzzer for checking optimized and unoptimized expression equivalence
- <<: *jstestfuzz_template
name: aggregation_expression_optimization_fuzzer_gen
tags: ["aggfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --diffTestingMode optimization
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
npm_command: agg-expr-fuzzer
name: aggregation_expression_optimization_fuzzer
## Standalone generational fuzzer for checking optimized and unoptimized aggregation pipelines
- <<: *jstestfuzz_template
name: aggregation_optimization_fuzzer_gen
tags: ["aggfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --diffTestingMode optimization
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
npm_command: agg-fuzzer
name: aggregation_optimization_fuzzer
## Standalone fuzzer for checking wildcard index correctness ##
- <<: *jstestfuzz_template
name: aggregation_wildcard_fuzzer_gen
tags: ["aggfuzzer", "common", "wildcard"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --diffTestingMode wildcard
npm_command: agg-fuzzer
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: aggregation_wildcard_fuzzer
## jstestfuzz standalone fuzzer for checking find and aggregate equivalence ##
- <<: *jstestfuzz_template
name: query_fuzzer_standalone_gen
tags: ["query_fuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --diffTestingMode standalone
npm_command: query-fuzzer
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: query_fuzzer_standalone
## jstestfuzz sharded fuzzer for checking find and aggregate equivalence ##
- <<: *jstestfuzz_template
name: query_fuzzer_sharded_gen
tags: ["query_fuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --diffTestingMode sharded
npm_command: query-fuzzer
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: query_fuzzer_sharded
## jstestfuzz standalone update generational fuzzer ##
- <<: *jstestfuzz_template
name: update_fuzzer_gen
tags: ["updatefuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
npm_command: update-fuzzer
suite: generational_fuzzer
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: update_fuzzer
task_path_suffix: "/data/multiversion"
## jstestfuzz replication update generational fuzzer ##
- <<: *jstestfuzz_template
name: update_fuzzer_replication_gen
tags: ["updatefuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
npm_command: update-fuzzer
suite: generational_fuzzer_replication
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: update_fuzzer_replication
task_path_suffix: "/data/multiversion"
## rollback multiversion fuzzer ##
- <<: *jstestfuzz_template
name: rollback_multiversion_fuzzer_gen
tags: ["multiversion_fuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 5
num_tasks: 1
npm_command: rollback-fuzzer
suite: rollback_multiversion_fuzzer
# Rollback suites create indexes while a majority of nodes is unavailable for
# replication, so index build commit quorum is disabled.
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}, enableIndexBuildCommitQuorum: false}'"
name: rollback_multiversion_fuzzer
task_path_suffix: "/data/multiversion"
## rollback generational fuzzer ##
- <<: *jstestfuzz_template
name: rollback_fuzzer_gen
tags: ["rollbackfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 8
num_tasks: 1
npm_command: rollback-fuzzer
suite: rollback_fuzzer
# Rollback suites create indexes while a majority of nodes is unavailable for
# replication, so index build commit quorum is disabled.
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}, enableIndexBuildCommitQuorum: false}'"
name: rollback_fuzzer
## rollback generational fuzzer with clean shutdowns ##
- <<: *jstestfuzz_template
name: rollback_fuzzer_clean_shutdowns_gen
tags: ["rollbackfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50
npm_command: rollback-fuzzer
suite: rollback_fuzzer_clean_shutdowns
# Rollback suites create indexes while a majority of nodes is unavailable for
# replication, so index build commit quorum is disabled.
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}, enableIndexBuildCommitQuorum: false}'"
name: rollback_fuzzer_clean_shutdowns
## rollback generational fuzzer with unclean shutdowns ##
- <<: *jstestfuzz_template
name: rollback_fuzzer_unclean_shutdowns_gen
tags: ["rollbackfuzzer"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50
npm_command: rollback-fuzzer
suite: rollback_fuzzer_unclean_shutdowns
# Rollback suites create indexes while a majority of nodes is unavailable for
# replication, so index build commit quorum is disabled.
resmoke_args: "--mongodSetParameters='{logComponentVerbosity: {command: 2}, enableIndexBuildCommitQuorum: false}'"
name: rollback_fuzzer_unclean_shutdowns
## jstestfuzz ##
- <<: *jstestfuzz_template
name: jstestfuzz_gen
tags: ["jstestfuzz", "common"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
npm_command: jstestfuzz
name: jstestfuzz
## jstestfuzz concurrent ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_gen
tags: ["jstestfuzz", "common"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent
## jstestfuzz concurrent replica set ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_replication_gen
tags: ["jstestfuzz", "common", "repl"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_replication
## jstestfuzz concurrent replica set with logical session ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_replication_session_gen
tags: ["jstestfuzz", "session"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication_session
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_replication_session
## jstestfuzz concurrent sharded cluster ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_sharded_gen
tags: ["jstestfuzz", "common"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_sharded
## jstestfuzz concurrent sharded cluster causal consistency ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_sharded_causal_consistency_gen
tags: ["jstestfuzz", "causal"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_causal_consistency
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_sharded_causal_consistency
## jstestfuzz concurrent sharded cluster continuous stepdown ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_sharded_continuous_stepdown_gen
tags: ["jstestfuzz", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_continuous_stepdown
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_sharded_continuous_stepdown
## jstestfuzz concurrent sharded cluster with logical session ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_sharded_session_gen
tags: ["jstestfuzz", "session"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: ${jstestfuzz_concurrent_num_files|10}
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_session
resmoke_args: --storageEngine=wiredTiger --numClientsPerFixture=10
name: jstestfuzz_concurrent_sharded_session
# jstestfuzz interrupt #
- <<: *jstestfuzz_template
name: jstestfuzz_interrupt_gen
tags: ["jstestfuzz", "interrupt"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_interrupt
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_interrupt
# jstestfuzz interrupt #
- <<: *jstestfuzz_template
name: jstestfuzz_interrupt_replication_gen
tags: ["jstestfuzz", "interrupt"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_interrupt_replication
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_interrupt_replication
# jstestfuzz interrupt with flow control engaged #
- <<: *jstestfuzz_template
name: jstestfuzz_interrupt_replication_flow_control_gen
tags: ["jstestfuzz", "interrupt", "flow_control"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_interrupt_replication
resmoke_args: "--flowControlTicketOverride=1 --storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_interrupt_replication_flow_control
## jstestfuzz sharded cluster continuous stepdown with flow control engaged ##
- <<: *jstestfuzz_template
name: jstestfuzz_sharded_continuous_stepdown_flow_control_gen
tags: ["jstestfuzz", "flow_control", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_continuous_stepdown
resmoke_args: >-
--flowControlTicketOverride=3
--storageEngine=wiredTiger
--mongodSetParameters="{logComponentVerbosity: {command: 2}}"
name: jstestfuzz_sharded_continuous_stepdown_flow_control
## jstestfuzz concurrent sharded cluster continuous stepdown with flow control engaged ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_sharded_continuous_stepdown_flow_control_gen
tags: ["jstestfuzz", "flow_control", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_continuous_stepdown
resmoke_args: >-
--flowControlTicketOverride=30
--storageEngine=wiredTiger
--numClientsPerFixture=10
name: jstestfuzz_concurrent_sharded_continuous_stepdown_flow_control
# jstestfuzz replication continuous stepdown with flow control engaged #
- <<: *jstestfuzz_template
name: jstestfuzz_replication_continuous_stepdown_flow_control_gen
tags: ["jstestfuzz", "repl", "flow_control", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication_continuous_stepdown
resmoke_args: >-
--flowControlTicketOverride=1
--storageEngine=wiredTiger
--mongodSetParameters="{logComponentVerbosity: {command: 2}}"
name: jstestfuzz_replication_continuous_stepdown_flow_control
## jstestfuzz concurrent replication continuous stepdown with flow control engaged ##
- <<: *jstestfuzz_template
name: jstestfuzz_concurrent_replication_continuous_stepdown_flow_control_gen
tags: ["jstestfuzz", "repl", "flow_control", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 2
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication_continuous_stepdown
resmoke_args: >-
--flowControlTicketOverride=10
--storageEngine=wiredTiger
--numClientsPerFixture=10
name: jstestfuzz_concurrent_replication_continuous_stepdown_flow_control
## jstestfuzz replica set ##
- <<: *jstestfuzz_template
name: jstestfuzz_replication_gen
tags: ["jstestfuzz", "common", "repl"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_replication
## jstestfuzz multiversion replica set ##
- <<: *jstestfuzz_template
name: jstestfuzz_replication_multiversion_gen
tags: ["jstestfuzz_multiversion_gen"]
commands:
- func: "generate multiversion tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
suite: jstestfuzz_replication
is_jstestfuzz: true
use_multiversion: /data/multiversion
npm_command: jstestfuzz
## jstestfuzz initial sync replica set ##
- <<: *jstestfuzz_template
name: jstestfuzz_replication_initsync_gen
tags: ["jstestfuzz", "initsync"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 15
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication_initsync
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_replication_initsync
## jstestfuzz replica set with logical session ##
- <<: *jstestfuzz_template
name: jstestfuzz_replication_session_gen
tags: ["jstestfuzz", "session"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_replication_session
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_replication_session
## jstestfuzz sharded cluster ##
- <<: *jstestfuzz_template
name: jstestfuzz_sharded_gen
tags: ["jstestfuzz", "common"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_sharded
## jstestfuzz sharded multiversion cluster ##
- <<: *jstestfuzz_template
  name: jstestfuzz_sharded_multiversion_gen
  # Quoted string-list form for consistency with every other task's tags
  # (e.g. jstestfuzz_replication_multiversion_gen); the tag value is unchanged.
  tags: ["jstestfuzz_multiversion_gen"]
  commands:
  - func: "generate multiversion tasks"
    vars:
      <<: *jstestfuzz_config_vars
      jstestfuzz_vars: --jsTestsDir ../jstests
      resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
      suite: jstestfuzz_sharded
      is_jstestfuzz: true
      use_multiversion: /data/multiversion
      npm_command: jstestfuzz
## jstestfuzz sharded cluster causal consistency ##
- <<: *jstestfuzz_template
name: jstestfuzz_sharded_causal_consistency_gen
tags: ["jstestfuzz", "causal"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_causal_consistency
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_sharded_causal_consistency
## jstestfuzz sharded cluster continuous stepdown ##
- <<: *jstestfuzz_template
name: jstestfuzz_sharded_continuous_stepdown_gen
tags: ["jstestfuzz", "stepdowns"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
num_files: 10
num_tasks: 1
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_continuous_stepdown
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_sharded_continuous_stepdown
## jstestfuzz sharded cluster with logical session ##
- <<: *jstestfuzz_template
name: jstestfuzz_sharded_session_gen
tags: ["jstestfuzz", "session"]
commands:
- func: "generate fuzzer tasks"
vars:
<<: *jstestfuzz_config_vars
jstestfuzz_vars: --jsTestsDir ../jstests
suite: jstestfuzz_sharded_session
resmoke_args: "--storageEngine=wiredTiger --mongodSetParameters='{logComponentVerbosity: {command: 2}}'"
name: jstestfuzz_sharded_session
## Tests that the multiversion test generation logic is not broken.
- name: multiversion_sanity_check_gen
  commands:
  - func: "generate multiversion tasks"
    vars:
      suite: replica_sets_jscore_passthrough
      # Restrict the run to tests explicitly tagged multiversion_sanity_check,
      # keeping this canary task small.
      resmoke_args: --storageEngine=wiredTiger --includeWithAnyTags=multiversion_sanity_check
      # NOTE(review): sibling multiversion tasks pass the path as
      # use_multiversion (e.g. jstestfuzz_replication_multiversion_gen) while
      # this one uses task_path_suffix — confirm which variable the
      # "generate multiversion tasks" function actually reads.
      task_path_suffix: /data/multiversion
      fallback_num_sub_suites: 1
- name: replica_sets_jscore_multiversion_passthrough_gen
tags: ["multiversion_passthrough"]
commands:
- func: "generate multiversion tasks"
vars:
suite: replica_sets_jscore_passthrough
resmoke_args: --storageEngine=wiredTiger
task_path_suffix: /data/multiversion
fallback_num_sub_suites: 4
# Check that the mutational fuzzer can parse JS files modified in a patch build.
- name: lint_fuzzer_sanity_patch
  # Patch builds only: lint just the JS files added or modified by the patch,
  # rather than the whole tree (see lint_fuzzer_sanity_all for the full run).
  patch_only: true
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "get added and modified patch files"
  - func: "setup jstestfuzz"
  - func: "lint fuzzer sanity patch"
# Check that the mutational fuzzer can parse all JS files.
- name: lint_fuzzer_sanity_all
  # Full-tree counterpart of lint_fuzzer_sanity_patch: no patch-file
  # discovery step, so every JS file is checked.
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "setup jstestfuzz"
  - func: "lint fuzzer sanity all"
## integration test suites ##
- <<: *task_template
name: aggregation
tags: ["aggregation", "common"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_disabled_optimization
tags: ["aggregation", "common"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_disabled_optimization --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_ese
tags: ["aggregation", "encrypt"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_ese --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_ese_gcm
tags: ["aggregation", "encrypt", "gcm"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_ese_gcm --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_auth
tags: ["aggregation", "auth", "common"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_auth --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_facet_unwind_passthrough
tags: ["aggregation", "unwind"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_facet_unwind_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_mongos_passthrough
tags: ["aggregation", "no_async"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_mongos_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_one_shard_sharded_collections
tags: ["aggregation", "no_async"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_one_shard_sharded_collections --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_read_concern_majority_passthrough
tags: ["aggregation", "read_write_concern"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_read_concern_majority_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_secondary_reads
tags: ["aggregation", "secondary_reads"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_secondary_reads --storageEngine=wiredTiger
- <<: *task_template
name: aggregation_sharded_collections_passthrough
tags: ["aggregation", "common"]
depends_on:
- name: aggregation
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=aggregation_sharded_collections_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: audit
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=audit --storageEngine=wiredTiger
- name: auth_gen
tags: ["auth"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 4
- name: burn_in_tags_gen
  depends_on:
  - name: compile
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
    vars:
      pip_dir: ${workdir}/src/etc/pip
  - func: "upload pip requirements"
  - func: "do multiversion setup"
  - func: "generate burn in tags"
    vars:
      # Presumably bounds how far back revision history is scanned for
      # changed tests — TODO confirm against the burn_in tooling.
      max_revisions: 25
      # Repeat-execution knobs consumed by "generate burn in tags":
      # up to 600s per test, between 2 and 1000 repetitions.
      repeat_tests_secs: 600
      repeat_tests_min: 2
      repeat_tests_max: 1000
- name: selected_tests_gen
  # Generates the dynamic "selected tests" tasks; setup mirrors
  # burn_in_tags_gen but without the multiversion setup step.
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
    vars:
      pip_dir: ${workdir}/src/etc/pip
  - func: "upload pip requirements"
  - func: "generate selected tests"
- name: auth_audit_gen
tags: ["auth", "audit"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 4
- <<: *task_template
name: change_streams
tags: ["change_streams"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_multiversion_gen
tags: ["multiversion_passthrough"]
commands:
- func: "generate multiversion tasks"
vars:
suite: change_streams
resmoke_args: --storageEngine=wiredTiger
task_path_suffix: /data/multiversion
fallback_num_sub_suites: 4
- <<: *task_template
name: change_streams_mongos_sessions_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_mongos_sessions_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_mongos_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_mongos_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_secondary_reads
tags: ["change_streams", "secondary_reads"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_secondary_reads --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_sharded_collections_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_sharded_collections_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_sharded_collections_multiversion_passthrough_gen
tags: ["multiversion_passthrough"]
depends_on:
- name: change_streams_multiversion_gen
commands:
- func: "generate multiversion tasks"
vars:
suite: change_streams_sharded_collections_passthrough
resmoke_args: --storageEngine=wiredTiger
task_path_suffix: /data/multiversion
fallback_num_sub_suites: 4
- <<: *task_template
name: change_streams_whole_db_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_db_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_db_mongos_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams_mongos_passthrough
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_db_mongos_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_db_secondary_reads_passthrough
tags: ["change_streams", "secondary_reads"]
depends_on:
- name: change_streams_secondary_reads
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_db_secondary_reads_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_db_sharded_collections_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams_sharded_collections_passthrough
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_db_sharded_collections_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_cluster_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_cluster_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_cluster_mongos_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams_mongos_passthrough
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_cluster_mongos_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_cluster_secondary_reads_passthrough
tags: ["change_streams", "secondary_reads"]
depends_on:
- name: change_streams_secondary_reads
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_cluster_secondary_reads_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_whole_cluster_sharded_collections_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams_sharded_collections_passthrough
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_whole_cluster_sharded_collections_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_multi_stmt_txn_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_multi_stmt_txn_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_multi_stmt_txn_mongos_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_multi_stmt_txn_mongos_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: change_streams_multi_stmt_txn_sharded_collections_passthrough
tags: ["change_streams"]
depends_on:
- name: change_streams
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=change_streams_multi_stmt_txn_sharded_collections_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: disk_wiredtiger
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=disk_wiredtiger --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: ese
tags: ["encrypt"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=ese --storageEngine=wiredTiger
- <<: *task_template
name: failpoints
tags: ["misc_js"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=failpoints --storageEngine=wiredTiger
- <<: *task_template
name: failpoints_auth
tags: ["auth"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=failpoints_auth --storageEngine=wiredTiger
- <<: *task_template
name: gle_auth
tags: ["auth", "gle"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=gle_auth --shellWriteMode=legacy --shellReadMode=legacy --excludeWithAnyTags=requires_find_command --storageEngine=wiredTiger
- <<: *task_template
name: gle_auth_write_cmd
tags: ["auth", "gle"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=gle_auth --shellWriteMode=commands --storageEngine=wiredTiger
- <<: *task_template
name: gle_auth_basics_passthrough
tags: ["auth", "gle"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=gle_auth_basics_passthrough --shellWriteMode=legacy --shellReadMode=legacy --storageEngine=wiredTiger --excludeWithAnyTags=requires_find_command
- <<: *task_template
name: gle_auth_basics_passthrough_write_cmd
tags: ["auth", "gle"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=gle_auth_basics_passthrough --shellWriteMode=commands --storageEngine=wiredTiger
- <<: *task_template
name: integration_tests_standalone
tags: ["integration", "standalone"]
commands:
- command: manifest.load
- func: "git get project"
- func: "do setup"
- func: "set up win mount script"
- func: "generate compile expansions" # Generate compile expansions needs to be run to mount the shared scons cache.
- func: "apply compile expansions"
- func: "scons compile"
vars:
targets: install-integration-tests
compiling_for_test: true
bypass_compile: false
task_compile_flags: >-
--detect-odr-violations
- func: "run tests"
vars:
resmoke_args: --suites=integration_tests_standalone --storageEngine=wiredTiger
- <<: *task_template
name: integration_tests_standalone_audit
tags: ["integration", "audit"]
commands:
- command: manifest.load
- func: "git get project"
- func: "do setup"
- func: "set up win mount script"
- func: "generate compile expansions" # Generate compile expansions needs to be run to mount the shared scons cache.
- func: "apply compile expansions"
- func: "scons compile"
vars:
targets: install-integration-tests
compiling_for_test: true
bypass_compile: false
task_compile_flags: >-
--detect-odr-violations
- func: "run tests"
vars:
resmoke_args: --suites=integration_tests_standalone_audit --storageEngine=wiredTiger
- <<: *task_template
name: integration_tests_replset
tags: ["integration"]
commands:
- command: manifest.load
- func: "git get project"
- func: "do setup"
- func: "set up win mount script"
- func: "generate compile expansions" # Generate compile expansions needs to be run to mount the shared scons cache.
- func: "apply compile expansions"
- func: "scons compile"
vars:
targets: install-integration-tests
compiling_for_test: true
bypass_compile: false
task_compile_flags: >-
--detect-odr-violations
- func: "run tests"
vars:
resmoke_args: --suites=integration_tests_replset --storageEngine=wiredTiger
- <<: *task_template
name: integration_tests_sharded
tags: ["integration", "sharded"]
commands:
- command: manifest.load
- func: "git get project"
- func: "do setup"
- func: "set up win mount script"
- func: "generate compile expansions" # Generate compile expansions needs to be run to mount the shared scons cache.
- func: "apply compile expansions"
- func: "scons compile"
vars:
targets: install-integration-tests
compiling_for_test: true
bypass_compile: false
task_compile_flags: >-
--detect-odr-violations
- func: "run tests"
vars:
resmoke_args: --suites=integration_tests_sharded --storageEngine=wiredTiger
- <<: *task_template
  name: external_auth
  commands:
  - func: "do setup"
  # Install the extra pip requirements that the external auth suite needs
  # before resmoke runs.
  - command: shell.exec
    params:
      shell: bash
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        # Not all git get project calls clone into ${workdir}/src so we allow
        # callers to tell us where the pip requirements files are.
        pip_dir="${pip_dir}"
        if [[ -z $pip_dir ]]; then
          # Default to most common location
          pip_dir="${workdir}/src/etc/pip"
        fi
        # Same as above we have to use quotes to preserve the
        # Windows path separator
        external_auth_txt="$pip_dir/external-auth-requirements.txt"
        python -m pip install -r "$external_auth_txt"
  - func: "run tests"
    vars:
      # Tests needing a domain controller are covered by the separate
      # external_auth_windows task, which includes that tag instead.
      resmoke_args: --suites=external_auth --excludeWithAnyTags=requires_domain_controller --storageEngine=wiredTiger
- <<: *task_template
  name: external_auth_aws
  commands:
  - func: "do setup"
  # Write the AWS IAM end-to-end test fixture config. Marked silent so the
  # expanded secret access keys below are not echoed into the task log.
  - command: shell.exec
    params:
      working_dir: src
      silent: true
      shell: bash
      script: |
        set -o errexit
        cat <<EOF > aws_e2e_setup.json
        {
          "iam_auth_ecs_account" : "${iam_auth_ecs_account}",
          "iam_auth_ecs_secret_access_key" : "${iam_auth_ecs_secret_access_key}",
          "iam_auth_ecs_account_arn": "arn:aws:iam::557821124784:user/authtest_fargate_user",
          "iam_auth_ecs_cluster": "${iam_auth_ecs_cluster}",
          "iam_auth_ecs_task_definition": "${iam_auth_ecs_task_definition}",
          "iam_auth_ecs_subnet_a": "${iam_auth_ecs_subnet_a}",
          "iam_auth_ecs_subnet_b": "${iam_auth_ecs_subnet_b}",
          "iam_auth_ecs_security_group": "${iam_auth_ecs_security_group}",
          "iam_auth_assume_aws_account" : "${iam_auth_assume_aws_account}",
          "iam_auth_assume_aws_secret_access_key" : "${iam_auth_assume_aws_secret_access_key}",
          "iam_auth_assume_role_name" : "${iam_auth_assume_role_name}",
          "iam_auth_ec2_instance_account" : "${iam_auth_ec2_instance_account}",
          "iam_auth_ec2_instance_secret_access_key" : "${iam_auth_ec2_instance_secret_access_key}",
          "iam_auth_ec2_instance_profile" : "${iam_auth_ec2_instance_profile}"
        }
        EOF
  # Install the AWS-specific pip requirements (structure mirrors the
  # external_auth task's requirements step).
  - command: shell.exec
    params:
      shell: bash
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        # Not all git get project calls clone into ${workdir}/src so we allow
        # callers to tell us where the pip requirements files are.
        pip_dir="${pip_dir}"
        if [[ -z $pip_dir ]]; then
          # Default to most common location
          pip_dir="${workdir}/src/etc/pip"
        fi
        # Same as above we have to use quotes to preserve the
        # Windows path separator
        external_auth_txt="$pip_dir/components/aws.req"
        python -m pip install -r "$external_auth_txt"
  - func: "run tests"
    vars:
      resmoke_args: --suites=external_auth_aws --storageEngine=wiredTiger
- <<: *task_template
name: external_auth_windows
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=external_auth --includeWithAnyTags=requires_domain_controller --storageEngine=wiredTiger
- <<: *task_template
name: sharding_gle_auth_basics_passthrough
tags: ["auth", "gle"]
depends_on:
- name: gle_auth_basics_passthrough
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharding_gle_auth_basics_passthrough --shellWriteMode=legacy --shellReadMode=legacy --storageEngine=wiredTiger --excludeWithAnyTags=requires_find_command
- <<: *task_template
name: sharding_gle_auth_basics_passthrough_write_cmd
tags: ["auth", "gle"]
depends_on:
- name: gle_auth_basics_passthrough_write_cmd
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharding_gle_auth_basics_passthrough --shellWriteMode=commands --storageEngine=wiredTiger
- <<: *task_template
name: jsCore
tags: ["jscore", "common"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_ese
tags: ["jscore", "encrypt"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_ese --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_ese_gcm
tags: ["jscore", "encrypt", "gcm"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_ese_gcm --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_compatibility
tags: ["jscore", "common", "compat"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core --shellReadMode=legacy --shellWriteMode=compatibility --storageEngine=wiredTiger --excludeWithAnyTags=requires_find_command
- <<: *task_template
name: jsCore_auth
tags: ["jscore", "auth", "common"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_auth
- <<: *task_template
name: jsCore_minimum_batch_size
tags: ["jscore"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_minimum_batch_size --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_op_query
tags: ["jscore"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_op_query --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_txns
tags: ["jscore", "common", "txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_txns --storageEngine=wiredTiger
- <<: *task_template
name: jsCore_txns_large_txns_format
tags: ["jscore", "txns", "multi_oplog"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=core_txns_large_txns_format --storageEngine=wiredTiger
- <<: *task_template
name: sharded_jscore_txns
tags: ["sharding", "jscore", "txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_jscore_txns --storageEngine=wiredTiger
- <<: *task_template
name: sharded_jscore_txns_without_snapshot
tags: ["sharding", "wo_snapshot", "jscore"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_jscore_txns --storageEngine=wiredTiger --excludeWithAnyTags=uses_snapshot_read_concern
- <<: *task_template
name: sharded_jscore_txns_sharded_collections
tags: ["sharding", "jscore", "txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_jscore_txns_sharded_collections --storageEngine=wiredTiger
- <<: *task_template
name: libunwind_tests
tags: []
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=libunwind --storageEngine=wiredTiger
- <<: *task_template
name: causally_consistent_jscore_txns_passthrough
tags: ["causally_consistent"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=causally_consistent_jscore_txns_passthrough --storageEngine=wiredTiger
- name: sharded_causally_consistent_jscore_txns_passthrough_gen
tags: ["sharding", "jscore", "causally_consistent", "txns"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 1
- name: sharded_causally_consistent_jscore_txns_passthrough_without_snapshot_gen
tags: ["sharding", "wo_snapshot", "causally_consistent", "jscore"]
commands:
- func: "generate resmoke tasks"
vars:
suite: sharded_causally_consistent_jscore_txns_passthrough
resmoke_args: --storageEngine=wiredTiger --excludeWithAnyTags=uses_snapshot_read_concern
fallback_num_sub_suites: 1
- <<: *task_template
name: sharded_collections_causally_consistent_jscore_txns_passthrough
tags: ["sharding", "jscore", "causally_consistent", "txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_collections_causally_consistent_jscore_txns_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_jscore_passthrough
tags: ["replica_sets", "common", "san", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_reconfig_jscore_passthrough
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_reconfig_jscore_passthrough --storageEngine=wiredTiger
- name: replica_sets_jscore_passthrough_gen
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 1
- <<: *task_template
name: replica_sets_large_txns_format_jscore_passthrough
tags: ["replica_sets", "multi_oplog", "large", "non_maj_read", "san"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_large_txns_format_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_multi_stmt_txn_jscore_passthrough
tags: ["replica_sets", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_multi_stmt_txn_jscore_passthrough --storageEngine=wiredTiger
- name: replica_sets_multi_stmt_txn_stepdown_jscore_passthrough_gen
tags: ["replica_sets", "non_maj_read"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- <<: *task_template
name: replica_sets_multi_stmt_txn_kill_primary_jscore_passthrough
tags: ["replica_sets", "non_maj_read"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_multi_stmt_txn_kill_primary_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_multi_stmt_txn_terminate_primary_jscore_passthrough
tags: ["replica_sets", "non_maj_read"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_multi_stmt_txn_terminate_primary_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_initsync_jscore_passthrough
tags: ["replica_sets", "san", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_initsync_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_initsync_static_jscore_passthrough
tags: ["replica_sets", "san", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_initsync_static_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_kill_primary_jscore_passthrough
tags: ["replica_sets", "large", "non_maj_read"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_kill_primary_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_terminate_primary_jscore_passthrough
tags: ["replica_sets", "large", "non_maj_read"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_terminate_primary_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: replica_sets_kill_secondaries_jscore_passthrough
tags: ["replica_sets", "san", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets_kill_secondaries_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: mongosTest
tags: ["misc_js", "non_read_maj"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=mongos_test
- name: multiversion_auth_gen
tags: ["auth", "multiversion"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: "--storageEngine=wiredTiger"
use_multiversion: /data/multiversion
fallback_num_sub_suites: 1
- name: multiversion_gen
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: "--storageEngine=wiredTiger"
use_multiversion: /data/multiversion
fallback_num_sub_suites: 1
- name: unittest_shell_hang_analyzer_gen
commands:
- func: "generate resmoke tasks"
vars:
suite: unittest_shell_hang_analyzer
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 1
- name: noPassthrough_gen
tags: ["misc_js"]
commands:
- func: "generate resmoke tasks"
vars:
suite: no_passthrough
resmoke_args: --storageEngine=wiredTiger
use_large_distro: "true"
fallback_num_sub_suites: 12
- name: noPassthroughWithMongod_gen
tags: ["misc_js"]
commands:
- func: "generate resmoke tasks"
vars:
suite: no_passthrough_with_mongod
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- <<: *task_template
name: bulk_gle_passthrough
tags: ["auth", "gle"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=bulk_gle_passthrough --storageEngine=wiredTiger
- name: slow1_gen
tags: ["misc_js", "non_win_dbg"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 4
resmoke_jobs_max: 1
- <<: *task_template
name: serial_run
tags: ["misc_js", "non_win_dbg"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=serial_run --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: sharded_collections_jscore_passthrough
tags: ["sharding", "jscore"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_collections_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: sharded_collections_jscore_multiversion_passthrough_gen
tags: ["multiversion_passthrough"]
commands:
- func: "generate multiversion tasks"
vars:
suite: sharded_collections_jscore_passthrough
resmoke_args: --storageEngine=wiredTiger
task_path_suffix: /data/multiversion
fallback_num_sub_suites: 4
- <<: *task_template
name: sharding_jscore_passthrough
tags: ["sharding", "jscore", "common"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharding_jscore_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: sharding_jscore_multiversion_passthrough_gen
tags: ["multiversion_passthrough"]
commands:
- func: "generate multiversion tasks"
vars:
suite: sharding_jscore_passthrough
resmoke_args: --storageEngine=wiredTiger
task_path_suffix: /data/multiversion
fallback_num_sub_suites: 4
- <<: *task_template
name: sharding_jscore_op_query_passthrough
tags: ["sharding", "jscore"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharding_jscore_op_query_passthrough --storageEngine=wiredTiger
- name: sharding_jscore_passthrough_wire_ops_gen
tags: ["sharding", "jscore"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
suite: sharding_jscore_passthrough
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger --shellReadMode=legacy --shellWriteMode=compatibility --excludeWithAnyTags=requires_find_command
fallback_num_sub_suites: 11
- <<: *task_template
name: sharded_multi_stmt_txn_jscore_passthrough
tags: ["sharding", "jscore", "multi_stmt"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sharded_multi_stmt_txn_jscore_passthrough --storageEngine=wiredTiger
- name: multi_shard_multi_stmt_txn_jscore_passthrough_gen
tags: ["multi_shard", "multi_stmt", "common"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 28
resmoke_jobs_max: 0 # No cap on number of jobs.
- name: multi_shard_local_read_write_multi_stmt_txn_jscore_passthrough_gen
tags: ["multi_shard", "common"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 21
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
tags: ["multi_stmt"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 19
- name: multi_shard_multi_stmt_txn_kill_primary_jscore_passthrough_gen
tags: ["multi_shard"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 48
- name: multi_shard_multi_stmt_txn_stepdown_primary_jscore_passthrough_gen
tags: ["multi_shard"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 37
- name: parallel_gen
tags: ["misc_js"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 2
resmoke_jobs_max: 1
- <<: *task_template
name: parallel_compatibility
tags: ["misc_js"]
depends_on:
- name: jsCore_compatibility
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=parallel --shellReadMode=legacy --shellWriteMode=compatibility --storageEngine=wiredTiger --excludeWithAnyTags=requires_find_command
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency
tags: ["concurrency", "common"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication
tags: ["concurrency", "common", "repl"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_replication --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
# Generates sub-tasks that run the concurrency_replication suite against
# mixed-version clusters (binaries staged under /data/multiversion).
- <<: *task_template
  name: concurrency_replication_multiversion_gen
  # Quoted to match the sibling *_multiversion_passthrough_gen tasks
  # (e.g. sharded_collections_jscore_multiversion_passthrough_gen).
  tags: ["multiversion_passthrough"]
  commands:
  - func: "generate multiversion tasks"
    vars:
      suite: concurrency_replication
      resmoke_args: --storageEngine=wiredTiger
      task_path_suffix: /data/multiversion
      fallback_num_sub_suites: 4
- <<: *task_template
name: concurrency_replication_causal_consistency
tags: ["concurrency", "repl", "large"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_replication_causal_consistency --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication_multi_stmt_txn
tags: ["concurrency", "common", "repl", "txn"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_replication_multi_stmt_txn --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
# TODO: SERVER-35964 revert the addition of UBSAN concurrency_replication suites.
- <<: *task_template
name: concurrency_replication_ubsan
tags: ["concurrency", "ubsan"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_replication_ubsan --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication_causal_consistency_ubsan
tags: ["concurrency", "ubsan"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_replication_causal_consistency_ubsan --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication_multi_stmt_txn_ubsan
tags: ["concurrency", "ubsan"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_replication_multi_stmt_txn_ubsan --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication_wiredtiger_cursor_sweeps
tags: ["concurrency", "repl"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_replication_wiredtiger_cursor_sweeps --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_replication_wiredtiger_eviction_debug
tags: ["concurrency", "repl", "debug_only"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_replication_wiredtiger_eviction_debug --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_replication
tags: ["concurrency", "common", "read_concern_maj"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_replication --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
# Generates sub-tasks that run the concurrency_sharded_replication suite
# against mixed-version clusters (binaries staged under /data/multiversion).
- <<: *task_template
  name: concurrency_sharded_replication_multiversion_gen
  # Quoted to match the sibling *_multiversion_passthrough_gen tasks.
  tags: ["multiversion_passthrough"]
  commands:
  - func: "generate multiversion tasks"
    vars:
      suite: concurrency_sharded_replication
      resmoke_args: --storageEngine=wiredTiger
      task_path_suffix: /data/multiversion
      fallback_num_sub_suites: 4
- <<: *task_template
name: concurrency_sharded_replication_with_balancer
tags: ["concurrency", "common", "read_concern_maj"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_replication_with_balancer --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_replication_no_txns
tags: ["concurrency", "no_txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_replication --excludeWithAnyTags=uses_transactions --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_replication_no_txns_with_balancer
tags: ["concurrency", "no_txns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_replication_with_balancer --excludeWithAnyTags=uses_transactions --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_clusterwide_ops_add_remove_shards
tags: ["concurrency", "common", "read_concern_maj"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_sharded_clusterwide_ops_add_remove_shards --storageEngine=wiredTiger
resmoke_jobs_max: 1
- name: concurrency_sharded_causal_consistency_gen
tags: ["concurrency"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 3
use_large_distro: "true"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_causal_consistency_and_balancer
tags: ["concurrency"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_causal_consistency_and_balancer --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_with_stepdowns
tags: ["concurrency", "stepdowns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_with_stepdowns --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_with_stepdowns_and_balancer
tags: ["concurrency", "stepdowns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_with_stepdowns_and_balancer --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_terminate_primary_with_balancer
tags: ["concurrency", "stepdowns", "kill_terminate"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_terminate_primary_with_balancer --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_kill_primary_with_balancer
tags: ["concurrency", "stepdowns", "kill_terminate"]
commands:
- func: "do setup"
- func: "run tests"
vars:
# TODO SERVER-46594: Enables commit quorum for concurrency suites.
resmoke_args: "--suites=concurrency_sharded_kill_primary_with_balancer --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_multi_stmt_txn
tags: ["concurrency"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_sharded_multi_stmt_txn --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_multi_stmt_txn_with_balancer
tags: ["concurrency"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_sharded_multi_stmt_txn_with_balancer --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_local_read_write_multi_stmt_txn
tags: ["concurrency"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_sharded_local_read_write_multi_stmt_txn --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_local_read_write_multi_stmt_txn_with_balancer
tags: ["concurrency"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_sharded_local_read_write_multi_stmt_txn_with_balancer --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_multi_stmt_txn_with_stepdowns
tags: ["concurrency", "stepdowns"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_sharded_multi_stmt_txn_with_stepdowns --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_multi_stmt_txn_terminate_primary
tags: ["concurrency", "stepdowns", "kill_terminate"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_sharded_multi_stmt_txn_terminate_primary --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_sharded_multi_stmt_txn_kill_primary
tags: ["concurrency", "stepdowns", "kill_terminate"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_sharded_multi_stmt_txn_kill_primary --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_simultaneous
tags: ["concurrency", "common"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=concurrency_simultaneous --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_simultaneous_replication
tags: ["concurrency", "common"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_simultaneous_replication --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_simultaneous_replication_wiredtiger_cursor_sweeps
tags: ["concurrency", "repl"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_simultaneous_replication_wiredtiger_cursor_sweeps --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: concurrency_simultaneous_replication_wiredtiger_eviction_debug
tags: ["concurrency", "repl", "debug_only"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: "--suites=concurrency_simultaneous_replication_wiredtiger_eviction_debug --storageEngine=wiredTiger --mongodSetParameters='{enableIndexBuildCommitQuorum: false}'"
resmoke_jobs_max: 1
- <<: *task_template
name: read_concern_linearizable_passthrough
tags: ["read_write_concern", "linearize", "large"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=read_concern_linearizable_passthrough --storageEngine=wiredTiger
- name: read_concern_majority_passthrough_gen
tags: ["read_write_concern"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 10
- <<: *task_template
name: write_concern_majority_passthrough
tags: ["read_write_concern", "large", "write"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=write_concern_majority_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: cwrwc_passthrough
tags: ["read_write_concern", "large", "write"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=cwrwc_passthrough --storageEngine=wiredTiger
- name: cwrwc_rc_majority_passthrough_gen
tags: ["read_write_concern"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 10
- <<: *task_template
name: cwrwc_wc_majority_passthrough
tags: ["read_write_concern", "large", "write"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=cwrwc_wc_majority_passthrough --storageEngine=wiredTiger
- name: secondary_reads_passthrough_gen
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 12
- <<: *task_template
name: replica_sets
tags: ["replica_sets", "san", "large"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=replica_sets --storageEngine=wiredTiger
- name: replica_sets_ese_gen
tags: ["replica_sets", "encrypt", "san"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- name: replica_sets_ese_gcm_gen
tags: ["replica_sets", "encrypt", "san", "gcm"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- name: replica_sets_auth_gen
tags: ["replica_sets", "common", "san", "auth"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 14
- name: replica_sets_large_txns_format_gen
tags: ["replica_sets", "multi_oplog", "san"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- name: replica_sets_max_mirroring_gen
tags: ["replica_sets", "san"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- <<: *task_template
  name: replica_sets_multiversion
  tags: ["random_multiversion_replica_sets"]
  commands:
  - func: "git get project"
  - func: "do setup"
  - func: "do multiversion setup"
  # Before running the (non-generated) replica_sets_multiversion suite, run
  # evergreen_gen_multiversion_tests.py generate-exclude-files, which — per its
  # flags below — targets this suite with the multiversion binaries at
  # /data/multiversion and --is-generated-suite=false.
  - command: shell.exec
    params:
      working_dir: src
      script: |
        set -o errexit
        set -o verbose
        ${activate_virtualenv}
        $python buildscripts/evergreen_gen_multiversion_tests.py generate-exclude-files --suite=replica_sets_multiversion --task-path-suffix=/data/multiversion --is-generated-suite=false
  - func: "run tests"
    vars:
      task_path_suffix: /data/multiversion
      resmoke_args: --suites=replica_sets_multiversion --storageEngine=wiredTiger
- <<: *task_template
name: sasl
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=sasl --storageEngine=wiredTiger
- name: sharding_gen
tags: ["sharding", "common"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_multiversion_gen
tags: ["random_multiversion_replica_sets"]
commands:
- func: "generate randomized multiversion tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
use_multiversion: /data/multiversion
suite: sharding_multiversion
- name: sharding_rs_matching_disabled_gen
tags: ["sharding", "common"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_rs_matching_match_busiest_node_gen
tags: ["sharding", "common"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_max_mirroring_gen
tags: ["sharding", "common"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_csrs_continuous_config_stepdown_gen
tags: ["sharding", "common", "csrs"]
commands:
- func: "generate resmoke tasks"
vars:
suite: sharding_continuous_config_stepdown
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 25
- name: sharding_ese_gen
tags: ["sharding", "encrypt"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_ese_gcm_gen
tags: ["sharding", "encrypt", "gcm"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 32
- name: sharding_op_query_gen
tags: ["sharding", "common", "op_query"]
commands:
- func: "generate resmoke tasks"
vars:
suite: sharding
use_large_distro: "true"
resmoke_args: --shellReadMode=legacy --storageEngine=wiredTiger --excludeWithAnyTags=requires_find_command
fallback_num_sub_suites: 31
- name: sharding_auth_gen
tags: ["sharding", "auth"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 31
- name: sharding_auth_audit_gen
tags: ["auth", "audit"]
depends_on:
- name: sharding_auth_gen
commands:
- func: "generate resmoke tasks"
vars:
depends_on: sharding_auth
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 31
- name: sharding_last_stable_mongos_and_mixed_shards_gen
tags: ["sharding", "common", "multiversion"]
commands:
- func: "generate resmoke tasks"
vars:
use_large_distro: "true"
use_multiversion: /data/multiversion
resmoke_args: ""
fallback_num_sub_suites: 24
- <<: *task_template
name: snmp
commands:
- func: "do setup"
- func: "do snmp setup"
- func: "run tests"
vars:
snmp_config_path: SNMPCONFPATH=snmpconf
resmoke_args: --suites=snmp --storageEngine=wiredTiger
- name: ssl_gen
tags: ["encrypt", "ssl"]
commands:
- func: "generate resmoke tasks"
vars:
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- name: sslSpecial_gen
tags: ["encrypt", "ssl"]
commands:
- func: "generate resmoke tasks"
vars:
suite: ssl_special
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 3
- <<: *task_template
name: jsCore_decimal
tags: ["jscore", "common", "decimal"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=decimal --storageEngine=wiredTiger
- <<: *task_template
name: read_only
tags: ["read_only"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=read_only --storageEngine=wiredTiger
- <<: *task_template
name: read_only_sharded
tags: ["read_only"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=read_only_sharded --storageEngine=wiredTiger
- <<: *task_template
name: session_jscore_passthrough
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=session_jscore_passthrough --storageEngine=wiredTiger
- name: causally_consistent_jscore_passthrough_gen
tags: ["causally_consistent"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 14
- name: causally_consistent_jscore_passthrough_auth_gen
tags: ["causally_consistent"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- name: sharded_causally_consistent_jscore_passthrough_gen
tags: ["causally_consistent"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 15
- name: retryable_writes_jscore_passthrough_gen
tags: ["retry"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
use_large_distro: "true"
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 9
- name: logical_session_cache_replication_default_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "repl"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 17
- name: logical_session_cache_replication_100ms_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "repl"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 17
- name: logical_session_cache_replication_1sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "one_sec", "repl"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 16
- name: logical_session_cache_replication_10sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "repl"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 16
- name: logical_session_cache_sharding_default_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 12
- name: logical_session_cache_sharding_100ms_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 48
- name: logical_session_cache_sharding_1sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "one_sec"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 20
- name: logical_session_cache_sharding_10sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 14
- name: logical_session_cache_standalone_default_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- name: logical_session_cache_standalone_100ms_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- name: logical_session_cache_standalone_1sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache", "one_sec"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- name: logical_session_cache_standalone_10sec_refresh_jscore_passthrough_gen
tags: ["logical_session_cache"]
depends_on:
- name: jsCore
commands:
- func: "generate resmoke tasks"
vars:
depends_on: jsCore
resmoke_args: --storageEngine=wiredTiger
fallback_num_sub_suites: 5
- <<: *task_template
name: retryable_writes_jscore_stepdown_passthrough
tags: ["retry"]
depends_on:
- name: jsCore
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=retryable_writes_jscore_stepdown_passthrough --storageEngine=wiredTiger
- <<: *task_template
name: watchdog_wiredtiger
tags: ["watchdog"]
commands:
- func: "do setup"
- func: "do watchdog setup"
- func: "run tests"
vars:
resmoke_args: --suites=watchdog --storageEngine=wiredTiger
resmoke_jobs_max: 1
# This is a separate task because it is only supported on Ubuntu 16.04+ which are not inmemory builders
- <<: *task_template
name: watchdog_inmemory
tags: ["watchdog"]
commands:
- func: "do setup"
- func: "do watchdog setup"
- func: "run tests"
vars:
resmoke_args: --suites=watchdog --storageEngine=inMemory
resmoke_jobs_max: 1
- <<: *task_template
name: free_monitoring
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=free_monitoring --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: client_encrypt
tags: ["ssl", "encrypt"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=client_encrypt --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: fle
tags: ["encrypt"]
commands:
- func: "do setup"
- func: "load aws test credentials"
- func: "run tests"
vars:
resmoke_args: --suites=fle --storageEngine=wiredTiger
resmoke_jobs_max: 1
- <<: *task_template
name: ocsp
tags: ["ssl", "ocsp"]
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=ocsp
resmoke_jobs_max: 1
- <<: *task_template
name: jsonSchema
commands:
- func: "do setup"
- func: "run tests"
vars:
resmoke_args: --suites=json_schema --storageEngine=wiredTiger
resmoke_jobs_max: 1
- name: powercycle
tags: ["powercycle"]
exec_timeout_secs: 7200 # 2 hour timeout for the task overall
depends_on:
- name: compile
commands:
- func: "do setup"
- func: "set up remote credentials"
vars:
<<: *powercycle_remote_credentials
- func: "set up EC2 instance"
vars:
<<: *powercycle_ec2_instance
- command: expansions.update
<<: *powercycle_expansions
- func: "run powercycle test"
timeout_secs: 1800 # 30 minute timeout for no output
vars:
<<: *powercycle_test
mongod_extra_options: --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --storageEngine wiredTiger\"
- name: powercycle_kill_mongod
tags: ["powercycle"]
exec_timeout_secs: 7200 # 2 hour timeout for the task overall
depends_on:
- name: compile
commands:
- func: "do setup"
- func: "set up remote credentials"
vars:
<<: *powercycle_remote_credentials
- func: "set up EC2 instance"
vars:
<<: *powercycle_ec2_instance
- command: expansions.update
<<: *powercycle_expansions
- func: "run powercycle test"
timeout_secs: 1800 # 30 minute timeout for no output
vars:
<<: *powercycle_test
crash_options: --crashMethod=kill --crashWaitTime=45 --jitterForCrashWaitTime=5 --instanceId=${instance_id}
mongod_extra_options: --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --storageEngine wiredTiger\"
- name: powercycle_fcv4.2
tags: ["powercycle"]
exec_timeout_secs: 7200 # 2 hour timeout for the task overall
depends_on:
- name: compile
commands:
- func: "do setup"
- func: "set up remote credentials"
vars:
<<: *powercycle_remote_credentials
- func: "set up EC2 instance"
vars:
<<: *powercycle_ec2_instance
- command: expansions.update
<<: *powercycle_expansions
- func: "run powercycle test"
timeout_secs: 1800 # 30 minute timeout for no output
vars:
<<: *powercycle_test
client_options: --numCrudClients=20 --numFsmClients=20
mongod_options: --mongodUsablePorts ${standard_port} ${secret_port} --dbPath=${db_path} --logPath=${log_path} --fcv=4.2
mongod_extra_options: --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --storageEngine wiredTiger\"
# Powercycle with mongod started as a single-node replica set
# (--replSet=powercycle) so crash recovery is exercised with an oplog.
- name: powercycle_replication
  tags: ["powercycle"]
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on:
  - name: compile
  commands:
  - func: "do setup"
  - func: "set up remote credentials"
    vars:
      <<: *powercycle_remote_credentials
  - func: "set up EC2 instance"
    vars:
      <<: *powercycle_ec2_instance
  - command: expansions.update
    <<: *powercycle_expansions
  - func: "run powercycle test"
    timeout_secs: 1800 # 30 minute timeout for no output
    vars:
      <<: *powercycle_test
      mongod_extra_options: --replSet=powercycle --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --storageEngine wiredTiger\"
# Powercycle against a single-node replica set with a small (20MB) oplog
# (--oplogSize 20) so oplog truncation happens frequently across crashes.
# NOTE(review): unlike the other powercycle_* tasks this one carries no
# tags: ["powercycle"] entry, so ".powercycle" tag selectors will not
# schedule it -- confirm whether the omission is intentional.
- name: powercycle_replication_smalloplog
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on:
  - name: compile
  commands:
  - func: "do setup"
  - func: "set up remote credentials"
    vars:
      <<: *powercycle_remote_credentials
  - func: "set up EC2 instance"
    vars:
      <<: *powercycle_ec2_instance
  - command: expansions.update
    <<: *powercycle_expansions
  - func: "run powercycle test"
    timeout_secs: 1800 # 30 minute timeout for no output
    vars:
      <<: *powercycle_test
      mongod_extra_options: --replSet=powercycle --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --oplogSize 20 --storageEngine wiredTiger\"
# Powercycle with frequent checkpoints (--syncdelay 10, i.e. every 10s
# instead of the default) to stress checkpointing across crashes.
- name: powercycle_syncdelay
  tags: ["powercycle"]
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on:
  - name: compile
  commands:
  - func: "do setup"
  - func: "set up remote credentials"
    vars:
      <<: *powercycle_remote_credentials
  - func: "set up EC2 instance"
    vars:
      <<: *powercycle_ec2_instance
  - command: expansions.update
    <<: *powercycle_expansions
  - func: "run powercycle test"
    timeout_secs: 1800 # 30 minute timeout for no output
    vars:
      <<: *powercycle_test
      mongod_extra_options: --mongodOptions=\"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --syncdelay 10 --storageEngine wiredTiger\"
# Powercycle with client workloads issuing writes at w:"majority" write
# concern (single node, so majority == the one member).
# NOTE(review): this task has no tags: ["powercycle"] entry, unlike most
# powercycle_* siblings -- confirm whether that is intentional.
- name: powercycle_write_concern_majority
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on:
  - name: compile
  commands:
  - func: "do setup"
  - func: "set up remote credentials"
    vars:
      <<: *powercycle_remote_credentials
  - func: "set up EC2 instance"
    vars:
      <<: *powercycle_ec2_instance
  - command: expansions.update
    <<: *powercycle_expansions
  - func: "run powercycle test"
    timeout_secs: 1800 # 30 minute timeout for no output
    vars:
      <<: *powercycle_test
      client_options: "--numCrudClients=20 --numFsmClients=20 --writeConcern='{\"w\": \"majority\"}'"
      # NOTE(review): sibling tasks write "--mongodOptions=\"..." with an
      # equals sign; this one uses a space. Presumably equivalent for the
      # option parser -- verify against powertest's argument handling.
      mongod_extra_options: --mongodOptions \"--setParameter enableTestCommands=1 --setParameter logComponentVerbosity='{storage:{recovery:2}}' --storageEngine wiredTiger\"
# Runs the IDL test suite (func "run idl tests") against the compiled
# artifacts produced by the compile task.
- name: idl_tests
  depends_on:
  - name: compile
  commands:
  - func: "do setup"
  - func: "run idl tests"
# Runs the "buildscripts_test" resmoke suite (tests for the Python tooling
# under buildscripts/). Needs no compiled binaries, so it has no compile
# dependency; runs single-job (resmoke_jobs_max: 1).
- name: buildscripts_test
  depends_on: []
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
  - func: "upload pip requirements"
  - func: "get buildnumber"
  - func: "set up credentials"
  - func: "do multiversion setup"
  - func: "run tests"
    vars:
      resmoke_args: --suites=buildscripts_test
      resmoke_jobs_max: 1
# Smoke-tests the distro packages built by the "package" task, using the
# "run kitchen" function with SSH/AWS credentials for the Test Kitchen hosts.
- name: test_packages
  depends_on:
  - name: package
  commands:
  - func: "git get project"
  - func: "fetch packages"
  - func: "set up remote credentials"
    vars:
      private_key_file: ~/.ssh/kitchen.pem
      private_key_remote: ${kitchen_private_key}
      aws_key_remote: ${kitchen_aws_key}
      aws_secret_remote: ${kitchen_aws_secret}
  - func: "run kitchen"
# Builds the distribution artifacts (dist tarball, shell tarball, their
# debug-symbol counterparts, the MSI on Windows via ${msi_target|}) and the
# distro packages from packager.py, then uploads each to mciuploads. All
# uploads are optional: true because not every variant produces every file.
- name: package
  commands:
  - func: "scons compile"
    vars:
      targets: >-
        archive-dist
        archive-dist-debug
        archive-shell
        archive-shell-debug
        ${msi_target|}
      task_compile_flags: >-
        --detect-odr-violations
        --separate-debug
        --legacy-tarball
  - func: "run packager.py"
  # Bundle the packager.py output (and any MSI) for the test_packages and
  # publish_packages tasks.
  - command: archive.targz_pack
    params:
      target: "packages.tgz"
      source_dir: "src"
      include:
      - "repo/**"
      - "./**.msi"
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/mongodb-dist.${ext|tgz}
      remote_file: ${project}/${build_variant}/${revision}/dist/mongo-${build_id}.${ext|tgz}
      bucket: mciuploads
      permissions: public-read
      content_type: application/tar
      display_name: Dist Tarball
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/mongodb-dist-debugsymbols.${ext|tgz}
      remote_file: ${project}/${build_variant}/${revision}/dist/mongo-${build_id}-debugsymbols.${ext|tgz}
      bucket: mciuploads
      permissions: public-read
      content_type: application/tar
      display_name: Dist Debugsymbols
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: packages.tgz
      remote_file: ${project}/${build_variant}/${revision}/artifacts/${build_id}-packages.tgz
      bucket: mciuploads
      permissions: public-read
      content_type: application/tar
      display_name: Packages
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell.${ext|tgz}
      remote_file: ${mongo_shell}
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}
      display_name: Shell
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      # Fixed typo: was "mongodb-shell-debugsymobls", which matched no build
      # output (cf. "mongodb-dist-debugsymbols" above), so this optional
      # upload was silently skipped on every variant.
      local_file: src/mongodb-shell-debugsymbols.${ext|tgz}
      remote_file: ${mongo_shell_debugsymbols}
      bucket: mciuploads
      permissions: public-read
      content_type: ${content_type|application/gzip}
      display_name: Shell Debugsymbols
# Submits the distro packages built by the "package" task to the MongoDB
# package repositories, using the curator tool against the barque service.
- name: publish_packages
  tags: ["publish"]
  # This should prevent this task from running in patch builds, where we
  # don't want to publish packages.
  patchable: false
  stepback: false
  # Same dependencies as "push" below, plus compile.
  depends_on:
  - name: compile
  - name: package
  - name: jsCore
  - name: dbtest
  - name: replica_sets_jscore_passthrough
  commands:
  - func: "fetch artifacts"
  - func: "fetch packages"
  - func: "apply compile expansions"
  - func: "set up remote credentials"
    vars:
      aws_key_remote: ${repo_aws_key}
      aws_secret_remote: ${repo_aws_secret}
  - func: "set up notary client credentials"
  # Download curator and submit the packages tarball (already uploaded to
  # mciuploads by the "package" task) to the repo service.
  # NOTE(review): curator is fetched over plain http from an internal host;
  # confirm this is acceptable for the build network.
  - command: shell.exec
    params:
      working_dir: src
      script: |
        . ./notary_env.sh
        set -o errexit
        set -o verbose
        CURATOR_RELEASE=${curator_release|"latest"}
        curl -L -O http://boxes.10gen.com/build/curator/curator-dist-rhel70-$CURATOR_RELEASE.tar.gz
        tar -zxvf curator-dist-rhel70-$CURATOR_RELEASE.tar.gz
        ./curator repo submit --service ${barque_url} --config ./etc/repo_config.yaml --distro ${packager_distro} --edition ${repo_edition} --version ${version} --arch ${packager_arch} --packages https://s3.amazonaws.com/mciuploads/${project}/${build_variant}/${revision}/artifacts/${build_id}-packages.tgz
# Stages release artifacts for publishing: fetches the binaries, shell,
# cryptd, mh, sources and debug symbols; renames them to their release file
# names; signs them via the notary service; then (in the commands that
# follow) uploads everything to the staging bucket and promotes it.
- name: push
  tags: ["publish"]
  # Mainline-only: pushing release artifacts from patch builds is undesired.
  patchable: false
  depends_on:
  - name: package
  - name: jsCore
  - name: dbtest
  - name: replica_sets_jscore_passthrough
  stepback: false
  commands:
  - func: "fetch artifacts"
  - func: "fetch packages"
  - func: "fetch dist tarball"
  # Fetch the shell
  - command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${mongo_shell}
      bucket: mciuploads
      local_file: src/mongo-shell.tgz
  # Fetch mongocryptd (only on variants that build it)
  - command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${mongo_cryptd}
      bucket: mciuploads
      local_file: src/mongo-cryptd.tgz
      build_variants: *mongocryptd_variants
  - func: "fetch mongohouse binaries"
  # Fetch the sources (on relevant variants only)
  - command: s3.get
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      remote_file: ${project}/${build_variant}/${revision}/sources/mongo-src-${build_id}.${ext|tgz}
      bucket: mciuploads
      local_file: src/distsrc.${ext|tgz}
      build_variants: [rhel70, windows]
  - func: "apply compile expansions"
  - func: "fetch dist debugsymbols"
  - func: "set up remote credentials"
    vars:
      aws_key_remote: ${repo_aws_key}
      aws_secret_remote: ${repo_aws_secret}
  - func: "set up notary client credentials"
  # Rename fetched artifacts to their release names ("|| true" because some
  # artifacts only exist on certain variants), then sign them; the notary
  # client's --skip-missing likewise tolerates variant-specific absences.
  - command: shell.exec
    params:
      working_dir: src
      script: |
        . ./notary_env.sh
        set -o errexit
        set -o verbose
        mv mongo-binaries.tgz mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}
        mv mongo-shell.tgz mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}
        mv mongo-cryptd.tgz mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz} || true
        mv mh.tgz mh-${push_name}-${push_arch}-${suffix}.${ext|tgz} || true
        mv mongo-debugsymbols.tgz mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz} || true
        mv distsrc.${ext|tgz} mongodb-src-${src_suffix}.${ext|tar.gz} || true
        /usr/bin/find build/ -type f | grep msi$ | xargs -I original_filename cp original_filename mongodb-${push_name}-${push_arch}-${suffix}.msi || true
        /usr/local/bin/notary-client.py --key-name "server-4.6" --auth-token-file ${workdir}/src/signing_auth_token --comment "Evergreen Automatic Signing ${revision} - ${build_variant} - ${branch_name}" --notary-url http://notary-service.build.10gen.cc:5000 --skip-missing mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz} mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz} mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz} mongodb-${push_name}-${push_arch}-${suffix}.msi mongodb-src-${src_suffix}.${ext|tar.gz} mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}
  # Upload each staged artifact (and its signature, and the signed MSI) to
  # the build-push-testing staging bucket. Uploads marked optional: true or
  # gated by build_variants only run where the file exists.
  # Put the binaries tarball/zipfile
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}
  # Put the shell tarball/zipfile
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}
  # Put the cryptd tarball/zipfile
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}
      build_variants: *mongocryptd_variants
  # Put the mh tarball/zipfile
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mh-${push_name}-${push_arch}-${suffix}.${ext|tgz}
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mh-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}
      build_variants: *mh_variants
  # Put the source tarball
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-src-${src_suffix}.${ext|tar.gz}
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}
      build_variants: [rhel70, windows]
  # Put the debug symbols
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      aws_key: ${aws_key}
      permissions: public-read
      local_file: src/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}
      bucket: build-push-testing
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}
      optional: true
  # Put the binaries tarball signature
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig
  # Put the shell tarball signature
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig
  # Put the cryptd tarball signature
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig
      build_variants: *mongocryptd_variants
  # Put the source tarball signature
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-src-${src_suffix}.${ext|tar.gz}.sig
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sig
      build_variants: [rhel70, windows]
  # Put the debug symbols signature
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      aws_key: ${aws_key}
      permissions: public-read
      local_file: src/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sig
      bucket: build-push-testing
      content_type: ${content_type|application/gzip}
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sig
      optional: true
  # Put the signed MSI file
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      permissions: public-read
      build_variants: ["enterprise-windows", "windows"]
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi
      bucket: build-push-testing
      content_type: application/x-msi
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi
  # Upload the sha1/sha256/md5 checksum files that accompany each staged
  # artifact, mirroring the artifact uploads above (same variant gating and
  # optionality).
  # Put the binaries tarball sha1
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1
      aws_key: ${aws_key}
      permissions: public-read
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1
  # Put the shell tarball sha1
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1
      aws_key: ${aws_key}
      permissions: public-read
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1
  # Put the cryptd tarball sha1
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1
      aws_key: ${aws_key}
      permissions: public-read
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1
      build_variants: *mongocryptd_variants
  # Put the source tarball sha1
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-src-${src_suffix}.${ext|tar.gz}.sha1
      aws_key: ${aws_key}
      permissions: public-read
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sha1
      build_variants: [rhel70, windows]
  # Put the debug symbols sha1
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      aws_key: ${aws_key}
      permissions: public-read
      local_file: src/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sha1
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sha1
      optional: true
  # Push the signed MSI sha1
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      permissions: public-read
      build_variants: ["enterprise-windows", "windows"]
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.sha1
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.sha1
  # Put the binaries tarball sha256
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256
      permissions: public-read
      aws_key: ${aws_key}
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256
  # Put the shell tarball sha256
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256
      permissions: public-read
      aws_key: ${aws_key}
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256
  # Put the cryptd tarball sha256
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256
      permissions: public-read
      aws_key: ${aws_key}
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256
      build_variants: *mongocryptd_variants
  # Put the source tarball sha256
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-src-${src_suffix}.${ext|tar.gz}.sha256
      permissions: public-read
      aws_key: ${aws_key}
      bucket: build-push-testing
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sha256
      build_variants: [rhel70, windows]
  # Put the debug symbols sha256
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sha256
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sha256
      optional: true
  # Put the signed MSI sha256
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      build_variants: ["enterprise-windows", "windows"]
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.sha256
      bucket: build-push-testing
      permissions: public-read
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.sha256
      content_type: text/plain
  # Put the binaries tarball md5
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5
  # Put the shell tarball md5
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5
  # Put the cryptd tarball md5
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5
      build_variants: *mongocryptd_variants
  # Put the source tarball md5
  - command: s3.put
    params:
      aws_secret: ${aws_secret}
      local_file: src/mongodb-src-${src_suffix}.${ext|tar.gz}.md5
      aws_key: ${aws_key}
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.md5
      build_variants: [rhel70, windows]
  # Put the debug symbols md5
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.md5
      bucket: build-push-testing
      content_type: text/plain
      permissions: public-read
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.md5
      optional: true
  # Put the signed MSI md5
  - command: s3.put
    params:
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      build_variants: ["enterprise-windows", "windows"]
      local_file: src/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.md5
      bucket: build-push-testing
      permissions: public-read
      content_type: text/plain
      remote_file: ${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.md5
- command: s3Copy.copy
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
s3_copy_files:
#Binaries
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}', 'bucket': '${push_bucket}'}}
#Shell
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}', 'bucket': '${push_bucket}'}}
#Cryptd
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}', 'bucket': '${push_bucket}'},
'build_variants': *mongocryptd_variants}
# MH
- {'source': {'path': '${push_path}-STAGE/${push_name}/mh-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mh-${push_name}-${push_arch}-${suffix}.${ext|tgz}', 'bucket': '${push_bucket}'},
'build_variants': *mh_variants}
#Source tarball
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}', 'bucket': 'build-push-testing'},
'destination': {'path': 'src/mongodb-src-${src_suffix}.${ext|tar.gz}', 'bucket': '${push_bucket}'},
'build_variants': ['rhel70', 'windows']}
#MSI (Windows only)
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi', 'bucket': '${push_bucket}'},
'build_variants': ['enterprise-windows', 'windows']}
#Binaries Signature
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig', 'bucket': '${push_bucket}'}}
#Shell Signature
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig', 'bucket': '${push_bucket}'}}
#Cryptd Signature
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sig', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sig', 'bucket': '${push_bucket}'},
'build_variants': *mongocryptd_variants}
#Source tarball signature
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sig', 'bucket': 'build-push-testing'},
'destination': {'path': 'src/mongodb-src-${src_suffix}.${ext|tar.gz}.sig', 'bucket': '${push_bucket}'},
'build_variants': ['rhel70', 'windows']}
#SHA1 for binaries
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1', 'bucket': '${push_bucket}'}}
#SHA1 for shell
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1', 'bucket': '${push_bucket}'}}
#SHA1 for cryptd
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha1', 'bucket': '${push_bucket}'},
'build_variants': *mongocryptd_variants}
#SHA1 for source tarball
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': 'src/mongodb-src-${src_suffix}.${ext|tar.gz}.sha1', 'bucket': '${push_bucket}'},
'build_variants': ['rhel70', 'windows']}
#SHA1 for MSI
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.sha1', 'bucket': '${push_bucket}'},
'build_variants': ['enterprise-windows', 'windows']}
#SHA256 for binaries
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256', 'bucket': '${push_bucket}'}}
#SHA256 for shell
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256', 'bucket': '${push_bucket}'}}
#SHA256 for cryptd
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.sha256', 'bucket': '${push_bucket}'},
'build_variants': *mongocryptd_variants}
#SHA256 for source tarball
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': 'src/mongodb-src-${src_suffix}.${ext|tar.gz}.sha256', 'bucket': '${push_bucket}'},
'build_variants': ['rhel70', 'windows']}
#SHA256 for MSI files
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.sha256', 'bucket': '${push_bucket}'},
'build_variants': ['enterprise-windows', 'windows']}
#MD5 for binaries
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5', 'bucket': '${push_bucket}'}}
#MD5 for shell
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-shell-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-shell-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5', 'bucket': '${push_bucket}'}}
#MD5 for cryptd
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}-${task_id}.${ext|tgz}.md5', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-cryptd-${push_name}-${push_arch}-${suffix}.${ext|tgz}.md5', 'bucket': '${push_bucket}'},
'build_variants': *mongocryptd_variants}
#MD5 for source tarball
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-src-${src_suffix}-${task_id}.${ext|tar.gz}.md5', 'bucket': 'build-push-testing'},
'destination': {'path': 'src/mongodb-src-${src_suffix}.${ext|tar.gz}.md5', 'bucket': '${push_bucket}'},
'build_variants': ['rhel70', 'windows']}
#MD5 for MSIs
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-${suffix}-${task_id}-signed.msi.md5', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-${suffix}-signed.msi.md5', 'bucket': '${push_bucket}'},
'build_variants': ['enterprise-windows', 'windows']}
# Debug symbols are not created for all variants and the copy is optional.
- command: s3Copy.copy
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
optional: true
s3_copy_files:
#Debug Symbols
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}', 'bucket': '${push_bucket}'}}
#Debug Symbols Signature
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sig', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sig', 'bucket': '${push_bucket}'}}
#SHA1 for debug symbols
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sha1', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sha1', 'bucket': '${push_bucket}'}}
#SHA256 for debugsymbols
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.sha256', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.sha256', 'bucket': '${push_bucket}'}}
      # MD5 for debug symbols
- {'source': {'path': '${push_path}-STAGE/${push_name}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}-${task_id}.${ext|tgz}.md5', 'bucket': 'build-push-testing'},
'destination': {'path': '${push_path}/mongodb-${push_name}-${push_arch}-debugsymbols-${suffix}.${ext|tgz}.md5', 'bucket': '${push_bucket}'}}
# Runs the "search" resmoke suite on WiredTiger; capped at a single concurrent job.
- <<: *task_template
  name: search
  commands:
  - func: "do setup"
  - func: "run tests"
    vars:
      resmoke_args: --suites=search --storageEngine=wiredTiger
      resmoke_jobs_max: 1
# Runs the "search_auth" resmoke suite on WiredTiger; capped at a single concurrent job.
- <<: *task_template
  name: search_auth
  commands:
  - func: "do setup"
  - func: "run tests"
    vars:
      resmoke_args: --suites=search_auth --storageEngine=wiredTiger
      resmoke_jobs_max: 1
# Runs the "search_ssl" resmoke suite on WiredTiger; capped at a single concurrent job.
- <<: *task_template
  name: search_ssl
  commands:
  - func: "do setup"
  - func: "run tests"
    vars:
      resmoke_args: --suites=search_ssl --storageEngine=wiredTiger
      resmoke_jobs_max: 1
# Standalone task (no compile dependency) that prunes the shared SCons cache.
- name: shared_scons_cache_pruning
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on: []
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "shared scons cache pruning"
# Windows counterpart of shared_scons_cache_pruning; additionally sets up the
# mount script needed to reach the shared cache from Windows hosts.
- name: win_shared_scons_cache_pruning
  exec_timeout_secs: 7200 # 2 hour timeout for the task overall
  depends_on: []
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set up win mount script"
  - func: "shared scons cache pruning"
# No-op task; exists only so the commit queue has a task to schedule.
- name: commit_queue_placeholder
  commands:
  - command: shell.exec
    params:
      script: |
        echo "Commit Queue Placeholder"
        exit 0
# On commit-queue runs, validates the commit message format with
# buildscripts/validate_commit_message.py; a no-op otherwise.
- name: validate_commit_message
  commands:
  - command: manifest.load
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
  - command: shell.exec
    type: test
    params:
      working_dir: src
      shell: bash
      script: |
        set -o verbose
        set -o errexit
        if [ "${is_commit_queue}" = "true" ]; then
          # Since `commit_message` is an evergreen expansion, we need a way to ensure we
          # properly deal with any special characters that could cause issues (like "). To
          # do this, we will write it out to a file, then read that file into a variable.
          # The heredoc delimiter is quoted so that bash does not perform parameter or
          # command substitution on the (already Evergreen-expanded) commit message body;
          # otherwise a message containing `$(...)`, backticks, or `$var` would be
          # reinterpreted by the shell.
          cat > commit_message.txt <<'END_OF_COMMIT_MSG'
        ${commit_message}
        END_OF_COMMIT_MSG
          commit_message_content=$(cat commit_message.txt)
          ${activate_virtualenv}
          $python buildscripts/validate_commit_message.py "$commit_message_content"
        fi
# Runs the mqlrun suite; first fetches and extracts mongohouse binaries it needs.
- <<: *task_template
  name: mqlrun
  commands:
  - func: "do setup"
  - func: "fetch mongohouse binaries"
  - func: "extract mongohouse binaries"
  - func: "run tests"
    vars:
      resmoke_args: --suites=mqlrun
#######################################
# Task Groups #
#######################################
task_groups:
# Task groups built on the shared compile template; each lists the tasks that
# run together on one host to reuse the compile workspace.
- <<: *compile_task_group_template
  name: compile_without_package_TG
  tasks:
  - compile
- <<: *compile_task_group_template
  name: compile_TG
  tasks:
  - compile
  - package
- <<: *compile_task_group_template
  name: compile_core_tools_TG
  tasks:
  - compile_core_tools
- <<: *compile_task_group_template
  name: compile_ninja_TG
  tasks:
  - compile_ninja
  # After the group finishes, upload the generated ninja file as a build artifact
  # (optional: the upload is skipped if the file does not exist).
  teardown_task:
  - command: s3.put
    params:
      optional: true
      aws_key: ${aws_key}
      aws_secret: ${aws_secret}
      local_file: src/all.build.ninja
      remote_file: ${project}/${build_variant}/${revision}/artifacts/all.${build_id}.build.ninja
      bucket: mciuploads
      permissions: public-read
      content_type: text/plain
      display_name: build.ninja
- <<: *compile_task_group_template
  name: server_discovery_and_monitoring_json_test_TG
  tasks:
  - server_discovery_and_monitoring_json_test
- <<: *compile_task_group_template
  name: dbtest_TG
  tasks:
  - dbtest
- <<: *compile_task_group_template
  name: libfuzzertests_TG
  tasks:
  - compile_libfuzzertests
  - libfuzzertests
- <<: *compile_task_group_template
  name: compile_all_run_unittests_TG
  tasks:
  - compile
  - unittests
  - dbtest
  - compile_all
  - package
# Task group for clang-tidy: full environment/credential setup in setup_group,
# compile expansions applied per-task, shared scons directory unmounted on teardown.
- name: clang_tidy_TG
  setup_group_can_fail_task: true
  setup_group:
  - command: manifest.load
  - func: "kill processes"
  - func: "cleanup environment"
  - func: "git get project"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
  - func: "configure evergreen api credentials"
  - func: "get buildnumber"
  - func: "set up credentials"
  - func: "set up win mount script"
  - func: "generate compile expansions"
  teardown_group:
  - func: "umount shared scons directory"
  - func: "cleanup environment"
  setup_task:
  - func: "apply compile expansions"
  - func: "set task expansion macros"
  # NOTE(review): teardown_task is intentionally left empty (parses as null) — confirm.
  teardown_task:
  tasks:
  - clang_tidy
# Task group that builds, publishes, and tests the embedded SDK. max_hosts: 1
# forces all member tasks to run serially on the same host.
- name: embedded_sdk_build_and_test
  setup_group_can_fail_task: true
  max_hosts: 1
  setup_group:
  - command: manifest.load
  - func: "git get project"
  - func: "get buildnumber"
  - func: "set up credentials"
  - func: "set task expansion macros"
  - func: "set up virtualenv"
  - func: "upload pip requirements"
  - func: "set up win mount script"
  - func: "generate compile expansions"
  teardown_group:
  - func: "umount shared scons directory"
  setup_task:
  - func: "set task expansion macros"
  - func: "apply compile expansions"
  # NOTE(review): teardown_task is intentionally left empty (parses as null) — confirm.
  teardown_task:
  tasks:
  - "embedded_sdk_build_cdriver"
  - "embedded_sdk_install_dev"
  - "embedded_sdk_s3_put"
  - "embedded_sdk_install_tests"
  - "embedded_sdk_tests_s3_put"
  - "embedded_sdk_run_tests"
  - "embedded_sdk_s3_put_latest"
  - "embedded_sdk_tests_s3_put_latest"
# Stitch support library task groups, built on the shared stitch template.
- <<: *stitch_support_task_group_template
  name: stitch_support_lib_build_and_archive
  tags: ["stitch"]
  tasks:
  - "stitch_support_create_lib"
- <<: *stitch_support_task_group_template
  name: stitch_support_lib_build_and_test
  tags: ["stitch"]
  # Run install and test serially on the same host so the test task sees the install.
  max_hosts: 1
  tasks:
  - "stitch_support_install_tests"
  - "stitch_support_run_tests"
#######################################
# Modules #
#######################################
# if a module is added and to be added to the manifest
# be sure to add the module to git.get_project revisions parameter
modules:
# Enterprise-only server modules, checked out under src/mongo/db/modules.
- name: enterprise
  repo: git@github.com:10gen/mongo-enterprise-modules.git
  prefix: src/mongo/db/modules
  branch: master
# WiredTiger development branch, checked out under src/third_party.
- name: wtdevelop
  repo: git@github.com:wiredtiger/wiredtiger.git
  prefix: src/third_party
  branch: develop
#######################################
# Buildvariants #
#######################################
buildvariants:
###########################################
# Linux buildvariants #
###########################################
# Runs each suite 10 times with shuffled test order to shake out flaky tests.
- name: linux-64-repeated-execution
  stepback: false
  display_name: ~ Linux Repeated Execution
  run_on:
  - rhel62-small
  expansions:
    compile_flags: -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --enable-free-mon=off --enable-http-client=off
    test_flags: >-
      --excludeWithAnyTags=requires_http_client
      --repeatSuites=10
      --shuffle
    resmoke_repeat_suites: 10
    # TODO: There are currently 2 ways of repeating suites, one for regular suites and one for
    # generated suites. Once everything is converted to generated suites, we should remove the
    # '--repeatSuites=10' from the test_flags. This will be done in SERVER-38817.
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - rhel62-large
  - name: .integration !.audit
    distros:
    - rhel62-large
  - name: jsCore
  - name: jsCore_txns
  - name: .logical_session_cache
  - name: parallel_gen
  - name: .concurrency .common !.kill_terminate
    distros:
    - rhel62-large
  - name: concurrency_replication_causal_consistency
    distros:
    - rhel62-large
# Debug (--dbg=on) Linux variant; anchored so linux-64-debug-wtdevelop can reuse it.
- &linux-64-debug-template
  name: linux-64-debug
  display_name: "! Linux DEBUG"
  batchtime: 240 # 4 hours
  run_on:
  - rhel62-large
  expansions:
    resmoke_jobs_factor: 0.5 # Avoid starting too many mongod's
    compile_flags: --dbg=on --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --enable-free-mon=off --enable-http-client=off
    scons_cache_scope: shared
    test_flags: --excludeWithAnyTags=requires_http_client
    target_resmoke_time: 15
    max_sub_suites: 100
  tasks:
  - name: compile_all_run_unittests_TG
  - name: .aggregation !.encrypt
  - name: .auth !.audit !.gle !.multiversion
  - name: bulk_gle_passthrough
  - name: .causally_consistent !.wo_snapshot
  - name: .change_streams !.secondary_reads
  - name: .misc_js
  - name: disk_wiredtiger
  - name: .integration !.audit
  - name: .jscore .common
  - name: jsCore_txns_large_txns_format
  - name: jsonSchema
  - name: libunwind_tests
  - name: .multi_shard
  - name: multi_stmt_txn_jscore_passthrough_with_migration_gen
  - name: .ocsp
  - name: .read_write_concern
  - name: .replica_sets !.encrypt
  - name: replica_sets_reconfig_jscore_passthrough
  - name: .retry
  - name: .read_only
  - name: session_jscore_passthrough
  - name: sharded_multi_stmt_txn_jscore_passthrough
  - name: .sharding .jscore !.wo_snapshot
  - name: sharding_gen
  - name: .stitch
  - name: server_discovery_and_monitoring_json_test_TG
# Same as linux-64-debug but against the WiredTiger develop branch. The merge key
# is a shallow merge, so `expansions` is replaced wholesale and must repeat the
# keys it still needs from the template.
- <<: *linux-64-debug-template
  name: linux-64-debug-wtdevelop
  display_name: "~ Linux DEBUG WiredTiger develop"
  batchtime: 1440 # 1 day
  modules:
  - wtdevelop
  expansions:
    use_wt_develop: true
    resmoke_jobs_factor: 0.5 # Avoid starting too many mongod's
    compile_flags: --dbg=on --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --enable-free-mon=off --enable-http-client=off
    test_flags: --excludeWithAnyTags=requires_http_client
# Runs with journaling disabled (--nojournal); see the test_flags comment for the
# resulting suite exclusions.
- name: linux-64-duroff
  display_name: Linux (No Journal)
  run_on:
  - rhel62-small
  expansions: &linux-64-required-duroff-expansions
    compile_flags: -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --enable-free-mon=off --enable-http-client=off
    multiversion_platform: rhel62
    multiversion_edition: targeted
    # Running WiredTiger with --nojournal in a replica set is no longer supported, so this variant
    # does not include replica set tests. Since transactions are only supported on replica sets, we
    # exclude those tests as well.
    test_flags: --nojournal --excludeWithAnyTags=requires_journaling,requires_replication,requires_sharding,uses_transactions,requires_http_client
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - rhel62-large
  - name: .aggfuzzer .common
  - name: aggregation
  - name: aggregation_auth
  - name: auth_gen
  - name: bulk_gle_passthrough
  - name: .misc_js !.sharded
  - name: concurrency
  - name: concurrency_simultaneous
  - name: disk_wiredtiger
  - name: failpoints_auth
  - name: .jscore .common !.decimal !.txns
  - name: .jstestfuzz .common !.repl
  - name: sharding_jscore_passthrough
# Community (org) build for Ubuntu 18.04 x86_64; builds packages and pushes to
# downloads.mongodb.org.
- name: ubuntu1804
  display_name: Ubuntu 18.04
  run_on:
  - ubuntu1804-test
  expansions:
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: x86_64-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: ubuntu1804
    multiversion_edition: targeted
    has_packages: true
    packager_script: packager.py
    packager_arch: x86_64
    packager_distro: ubuntu1804
    repo_edition: org
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1804-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: .auth !.audit !.multiversion
  - name: .misc_js
  - name: .concurrency .common
  - name: concurrency_replication_causal_consistency
    distros:
    - ubuntu1804-build
  - name: disk_wiredtiger
  - name: free_monitoring
  - name: .jscore .common
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: multiversion_gen
  - name: replica_sets
  - name: replica_sets_jscore_passthrough
  - name: .sharding .txns
  - name: sharding_gen
  - name: sharding_jscore_passthrough
  - name: watchdog_wiredtiger
  - name: .ssl
  - name: .stitch
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Enterprise build for Ubuntu 18.04 x86_64; also builds mongocryptd and the
# mobile/embedded (mh) targets, and pushes to downloads.10gen.com.
- name: enterprise-ubuntu1804-64
  display_name: Enterprise Ubuntu 18.04
  modules:
  - enterprise
  run_on:
  - ubuntu1804-test
  stepback: false
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: x86_64-enterprise-ubuntu1804
    mh_target: archive-mh archive-mh-debug
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: ubuntu1804
    multiversion_edition: enterprise
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: x86_64
    packager_distro: ubuntu1804
    repo_edition: enterprise
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1804-build
  - name: compile_ninja_TG
  - name: .aggfuzzer .common
  - name: audit
  - name: causally_consistent_jscore_txns_passthrough
  - name: .encrypt !.aggregation !.replica_sets !.sharding !.jscore
  - name: external_auth
  - name: external_auth_aws
  - name: .jscore .common !.compat !.decimal !.sharding
  - name: jsCore_auth
  - name: .jstestfuzz .common
  - name: .jstestfuzz_multiversion_gen
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: .multiversion_fuzzer
  - name: .multiversion_passthrough
  - name: .ocsp
  - name: .random_multiversion_replica_sets
  - name: replica_sets_auth_gen
  - name: replica_sets_jscore_passthrough
  - name: sasl
  - name: sharding_auth_gen
  - name: snmp
  - name: .watchdog
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Community (org) build for Ubuntu 16.04 x86_64, including Jepsen and powercycle
# tasks on dedicated distros; pushes to downloads.mongodb.org.
- name: ubuntu1604
  display_name: Ubuntu 16.04
  run_on:
  - ubuntu1604-test
  expansions:
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: x86_64-ubuntu1604
    lang_environment: LANG=C
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: ubuntu1604
    multiversion_edition: targeted
    has_packages: true
    packager_script: packager.py
    packager_arch: x86_64
    packager_distro: ubuntu1604
    repo_edition: org
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1604-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: .auth !.audit !.multiversion
  - name: causally_consistent_jscore_txns_passthrough
  - name: .misc_js
  - name: .concurrency .common
  - name: concurrency_replication_causal_consistency
    distros:
    - ubuntu1604-build
  - name: disk_wiredtiger
  - name: free_monitoring
  - name: .jepsen
    distros:
    - ubuntu1604-build
  - name: .jscore .common
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: multiversion_gen
  - name: .powercycle
    distros:
    - ubuntu1604-powercycle
  - name: powercycle_replication_smalloplog
    distros:
    - ubuntu1604-powercycle
  - name: powercycle_write_concern_majority
    distros:
    - ubuntu1604-powercycle
  - name: replica_sets
  - name: watchdog_wiredtiger
  - name: .replica_sets .common
  - name: .sharding .common !.op_query !.csrs
  - name: .sharding .txns
  - name: .stitch
  - name: .ssl
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Enterprise build for Ubuntu 16.04 on 64-bit ARM (aarch64).
- name: enterprise-ubuntu1604-arm64
  display_name: Enterprise Ubuntu 16.04 arm64
  modules:
  - enterprise
  run_on:
  - ubuntu1604-arm64-large
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: aarch64-enterprise-ubuntu1604
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) CCFLAGS="-march=armv8-a+crc -mtune=generic" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    resmoke_jobs_max: 8 # Avoid starting too many mongod's on ARM test servers
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: arm64
    packager_distro: ubuntu1604
    repo_edition: enterprise
    multiversion_platform: ubuntu1604
    multiversion_architecture: arm64
    multiversion_architecture_42_or_later: aarch64
    multiversion_edition: enterprise
  tasks:
  - name: compile_all_run_unittests_TG
  - name: aggregation
  - name: .auth !.audit !.multiversion !.jscore
  - name: .misc_js
  - name: fle
  - name: .jscore .common !.auth
  - name: .jstestfuzz .common
  - name: replica_sets
  - name: .replica_sets .common
  - name: .sharding .txns
  - name: sharding_gen
  - name: .ssl
  - name: .stitch
  - name: .publish
    distros:
    - ubuntu1604-test
# Enterprise build for Ubuntu 18.04 on 64-bit ARM (aarch64).
- name: enterprise-ubuntu1804-arm64
  display_name: Enterprise Ubuntu 18.04 arm64
  modules:
  - enterprise
  run_on:
  - ubuntu1804-arm64-build
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: aarch64-enterprise-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 -j$(grep -c ^processor /proc/cpuinfo) CCFLAGS="-march=armv8-a+crc -mtune=generic" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    resmoke_jobs_max: 8 # Avoid starting too many mongod's on ARM test servers
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: arm64
    packager_distro: ubuntu1804
    repo_edition: enterprise
    multiversion_platform: ubuntu1804
    multiversion_architecture: arm64
    multiversion_architecture_42_or_later: aarch64
    multiversion_edition: enterprise
  tasks:
  - name: compile_all_run_unittests_TG
  - name: aggregation
  - name: aggregation_wildcard_fuzzer_gen
  - name: .auth !.audit !.multiversion !.jscore
  - name: causally_consistent_jscore_txns_passthrough
  - name: .misc_js
  - name: .concurrency .common
  - name: concurrency_replication_causal_consistency
  - name: fle
  - name: .jscore .common !.auth
  - name: .jstestfuzz .common
  - name: .logical_session_cache .one_sec
  - name: replica_sets
  - name: .replica_sets .common
  - name: .sharding .txns
  - name: sharding_gen
  - name: sharding_jscore_passthrough
  - name: .ssl
  - name: .stitch
  - name: .publish
    distros:
    - ubuntu1804-test
# Community (org) build for Ubuntu 18.04 on 64-bit ARM; smaller task list than
# the enterprise arm64 variant.
- name: ubuntu1804-arm64
  display_name: Ubuntu 18.04 arm64
  run_on:
  - ubuntu1804-arm64-build
  expansions:
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: aarch64-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 -j$(grep -c ^processor /proc/cpuinfo) CCFLAGS="-march=armv8-a+crc -mtune=generic" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    resmoke_jobs_max: 8 # Avoid starting too many mongod's on ARM test servers
    has_packages: true
    packager_script: packager.py
    packager_arch: arm64
    packager_distro: ubuntu1804
    repo_edition: org
    multiversion_platform: ubuntu1804
    multiversion_architecture: arm64
    multiversion_architecture_42_or_later: aarch64
    multiversion_edition: targeted
  tasks:
  - name: compile_all_run_unittests_TG
  - name: free_monitoring
  - name: jsCore
  - name: replica_sets_jscore_passthrough
  - name: .publish
    distros:
    - ubuntu1804-test
# Enterprise build for Ubuntu 18.04 on POWER8 (ppc64le); compile parallelism is
# halved (cpus/2 via bc) and resmoke jobs capped at 2.
- name: enterprise-ubuntu1804-ppc64le
  display_name: Enterprise Ubuntu 18.04 PPC64LE
  modules:
  - enterprise
  run_on:
  - ubuntu1804-power8-test
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: ppc64le-enterprise-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 -j$(echo "$(grep -c processor /proc/cpuinfo)/2" | bc) CCFLAGS="-mcpu=power8 -mtune=power8 -mcmodel=medium" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    resmoke_jobs_max: 2
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: ppc64le
    packager_distro: ubuntu1804
    repo_edition: enterprise
    multiversion_platform: ubuntu1804
    multiversion_architecture: ppc64le
    multiversion_edition: enterprise
    use_default_timeouts: true
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1804-power8-build
  - name: aggregation
  - name: .auth !.audit !.multiversion !.jscore
  - name: .misc_js
  - name: fle
  - name: .jscore .common !.auth
  - name: .ocsp
  - name: replica_sets
  - name: .replica_sets .common
  - name: .sharding .txns
  - name: sharding_gen
  - name: .ssl
  - name: .stitch
  - name: .publish
    distros:
    - ubuntu1804-test
# Community (org) build for Ubuntu 18.04 on s390x; scheduled weekly via batchtime.
- name: ubuntu1804-s390x
  display_name: Ubuntu 18.04 s390x
  run_on:
  - ubuntu1804-zseries-test
  batchtime: 10080 # 7 days
  expansions:
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: s390x-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 --jlink=3 -j$(grep -c ^processor /proc/cpuinfo) CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: ubuntu1804
    multiversion_edition: targeted
    multiversion_architecture: s390x
    has_packages: true
    packager_script: packager.py
    packager_arch: s390x
    packager_distro: ubuntu1804
    repo_edition: org
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1804-zseries-build
  - name: jsCore
  - name: replica_sets_jscore_passthrough
  - name: .publish
    distros:
    - ubuntu1804-test
# Enterprise build for Ubuntu 18.04 on s390x; weekly batchtime, no stepback.
- name: enterprise-ubuntu1804-s390x
  display_name: Enterprise Ubuntu 18.04 s390x
  modules:
  - enterprise
  run_on:
  - ubuntu1804-zseries-test
  batchtime: 10080 # 7 days
  stepback: false
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: s390x-enterprise-ubuntu1804
    compile_flags: --ssl MONGO_DISTMOD=ubuntu1804 --jlink=3 -j$(grep -c ^processor /proc/cpuinfo) CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    resmoke_jobs_max: 2
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: s390x
    packager_distro: ubuntu1804
    repo_edition: enterprise
    multiversion_platform: ubuntu1804
    multiversion_architecture: s390x
    multiversion_edition: enterprise
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - ubuntu1804-zseries-build
  - name: aggregation
  - name: audit
  - name: .auth !.multiversion !.jscore
  - name: .misc_js
  - name: .encrypt
  - name: .integration !.audit
    distros:
    - ubuntu1804-zseries-build
  - name: .jscore .common !.auth
  - name: jsCore_op_query
  - name: .read_write_concern
  - name: replica_sets
  - name: .replica_sets .common
  - name: sasl
  - name: .sharding .jscore !.wo_snapshot !.multi_stmt
  - name: .sharding .common !.multiversion
  - name: .sharding .txns
  - name: snmp
  - name: .publish
    distros:
    - ubuntu1804-test
# Enterprise build for Amazon Linux (amzn64) x86_64; uses the toolchain virtualenv.
- name: enterprise-linux-64-amazon-ami
  display_name: "Enterprise Amazon Linux"
  modules:
  - enterprise
  run_on:
  - amazon1-2018-test
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: x86_64-enterprise-amzn64
    compile_flags: --ssl MONGO_DISTMOD=amzn64 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: amzn64
    multiversion_edition: enterprise
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: x86_64
    packager_distro: amazon
    repo_edition: enterprise
    scons_cache_scope: shared
    virtualenv: /opt/mongodbtoolchain/v3/bin/virtualenv
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - amazon1-2018-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: .auth !.gle !.multiversion
  - name: audit
  - name: bulk_gle_passthrough
  - name: causally_consistent_jscore_txns_passthrough
  - name: .encrypt !.aggregation
  - name: .jscore .common !.compat
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: noPassthrough_gen
  - name: noPassthroughWithMongod_gen
  - name: powercycle
  - name: .replica_sets .common
  - name: sasl
  - name: serial_run
  - name: .sharding .jscore !.wo_snapshot !.multi_stmt
  - name: .sharding .txns
  - name: slow1_gen
  - name: snmp
  - name: .stitch
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Community (org) build for Amazon Linux x86_64.
- name: amazon
  display_name: Amazon Linux
  run_on:
  - amazon1-2018-test
  expansions:
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: x86_64-amazon
    compile_flags: --ssl MONGO_DISTMOD=amazon -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: amazon
    multiversion_edition: targeted
    has_packages: true
    packager_script: packager.py
    packager_arch: x86_64
    packager_distro: amazon
    repo_edition: org
    scons_cache_scope: shared
    virtualenv: /opt/mongodbtoolchain/v3/bin/virtualenv
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - amazon1-2018-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: .auth !.audit !.multiversion
  - name: causally_consistent_jscore_txns_passthrough
  - name: .misc_js
  - name: .concurrency .common
  - name: concurrency_replication_causal_consistency
    distros:
    - amazon1-2018-build
  - name: disk_wiredtiger
  - name: free_monitoring
  - name: .jscore .common
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: multiversion_gen
  - name: replica_sets
  - name: .replica_sets .common
  - name: .sharding .jscore !.wo_snapshot !.multi_stmt
  - name: .sharding .common !.op_query !.csrs
  - name: .sharding .txns
  - name: .ssl
  - name: .stitch
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Enterprise build for Amazon Linux 2 x86_64; excludes tests tagged SERVER-34286.
- name: enterprise-amazon2
  display_name: "Enterprise Amazon Linux 2"
  modules:
  - enterprise
  run_on:
  - amazon2-test
  expansions:
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    test_flags: >-
      --excludeWithAnyTags=SERVER-34286
    push_path: linux
    push_bucket: downloads.10gen.com
    push_name: linux
    push_arch: x86_64-enterprise-amazon2
    compile_flags: --ssl MONGO_DISTMOD=amazon2 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    multiversion_platform: amazon2
    multiversion_edition: enterprise
    # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
    # spawning a large number of linker processes.
    num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
    has_packages: true
    packager_script: packager_enterprise.py
    packager_arch: x86_64
    packager_distro: amazon2
    repo_edition: enterprise
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - amazon2-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: audit
  - name: .auth !.gle !.multiversion
  - name: bulk_gle_passthrough
  - name: causally_consistent_jscore_txns_passthrough
  - name: .encrypt !.aggregation
  - name: .jscore .common !.compat
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: noPassthrough_gen
  - name: noPassthroughWithMongod_gen
  - name: .replica_sets .common
  - name: sasl
  - name: serial_run
  - name: .sharding .jscore !.wo_snapshot !.multi_stmt
  - name: .sharding .txns !.csrs
  - name: slow1_gen
  - name: snmp
  - name: .stitch
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Community (org) build for Amazon Linux 2 x86_64; excludes tests tagged SERVER-34286.
- name: amazon2
  display_name: Amazon Linux 2
  run_on:
  - amazon2-test
  expansions:
    test_flags: >-
      --excludeWithAnyTags=SERVER-34286
    push_path: linux
    push_bucket: downloads.mongodb.org
    push_name: linux
    push_arch: x86_64-amazon2
    compile_flags: --ssl MONGO_DISTMOD=amazon2 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
    # spawning a large number of linker processes.
    num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
    multiversion_platform: amazon
    multiversion_edition: targeted
    has_packages: true
    packager_script: packager.py
    packager_arch: x86_64
    packager_distro: amazon2
    repo_edition: org
    scons_cache_scope: shared
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - amazon2-build
  - name: .aggfuzzer .common
  - name: aggregation
  - name: .auth !.audit !.multiversion
  - name: causally_consistent_jscore_txns_passthrough
  - name: .misc_js
  - name: .concurrency .common
  - name: concurrency_replication_causal_consistency
    distros:
    - amazon2-build
  - name: disk_wiredtiger
  - name: free_monitoring
  - name: .jscore .common
  - name: .jstestfuzz .common
  - name: libunwind_tests
  - name: .logical_session_cache .one_sec
  - name: multiversion_gen
  - name: replica_sets
  - name: .replica_sets .common
  - name: .sharding .jscore !.wo_snapshot !.multi_stmt
  - name: .sharding .common !.op_query !.csrs
  - name: .sharding .txns
  - name: .ssl
  - name: .stitch
  - name: test_packages
    distros:
    - ubuntu1604-packer
  - name: .publish
# Daily cron variant that only runs lint_fuzzer_sanity_all; no stepback.
- name: tig-daily-cron
  modules:
  - enterprise
  display_name: "~ TIG Daily Cron"
  run_on:
  - rhel62-small
  stepback: false
  tasks:
  - name: lint_fuzzer_sanity_all
###########################################
# Windows buildvariants #
###########################################
# Debug (--dbg=on) Windows build on VS2019 hosts; large compiles run on the
# windows-64-vs2019-large distro.
- name: windows-debug
  display_name: "* Windows DEBUG"
  batchtime: 240 # 4 hours
  run_on:
  - windows-64-vs2019-small
  expansions:
    exe: ".exe"
    content_type: application/zip
    compile_flags: --dbg=on --opt=on --win-version-min=win10 -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) MONGO_DISTMOD=windows
    # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
    # spawning a large number of linker processes.
    num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
    python: '/cygdrive/c/python/python37/python.exe'
    ext: zip
    scons_cache_scope: shared
    multiversion_platform: windows
    multiversion_edition: enterprise
    large_distro_name: windows-64-vs2019-large
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - windows-64-vs2019-large
  - name: .aggregation !.auth !.encrypt
  - name: aggregation_expression_multiversion_fuzzer_gen
  - name: aggregation_expression_optimization_fuzzer_gen
  - name: auth_gen
  - name: bulk_gle_passthrough
  - name: .causally_consistent !.sharding
  - name: .change_streams !.secondary_reads
  - name: .misc_js !.non_win_dbg
  - name: .concurrency .debug_only
    distros:
    - windows-64-vs2019-large
  - name: disk_wiredtiger
  - name: free_monitoring
  - name: initial_sync_fuzzer_gen
  - name: .integration !.audit
    distros:
    - windows-64-vs2019-large
  - name: .jscore .common !.auth !.sharding
  - name: jsCore_txns_large_txns_format
  - name: jsonSchema
  - name: multi_shard_multi_stmt_txn_jscore_passthrough_gen
  - name: multi_stmt_txn_jscore_passthrough_with_migration_gen
  - name: .read_write_concern !.large
  - name: .read_write_concern .large
    distros:
    - windows-64-vs2019-large
  - name: .read_only
  - name: .rollbackfuzzer
  - name: .replica_sets !.large !.encrypt !.auth
  - name: .replica_sets .large
    distros:
    - windows-64-vs2019-large
  - name: retryable_writes_jscore_passthrough_gen
  - name: retryable_writes_jscore_stepdown_passthrough
    distros:
    - windows-64-vs2019-large
  - name: session_jscore_passthrough
  - name: sharding_gen
  - name: .stitch
  - name: server_discovery_and_monitoring_json_test_TG
# Required (!) enterprise Windows variant: compiles plus a small set of fast
# verification tasks; runs hourly.
- name: enterprise-windows-required
  display_name: "! Enterprise Windows"
  batchtime: 60 # 1 hour
  modules:
  - enterprise
  run_on:
  - windows-64-vs2019-small
  expansions:
    exe: ".exe"
    msi_target: msi
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    mh_target: archive-mh archive-mh-debug
    content_type: application/zip
    compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
    # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
    # spawning a large number of linker processes.
    num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
    python: '/cygdrive/c/python/python37/python.exe'
    ext: zip
    scons_cache_scope: shared
    multiversion_platform: windows
    multiversion_edition: enterprise
    jstestfuzz_num_generated_files: 35
    target_resmoke_time: 20
    max_sub_suites: 100
    large_distro_name: windows-64-vs2019-large
    push_path: windows
    push_bucket: downloads.10gen.com
    push_name: windows
    push_arch: x86_64-enterprise
  tasks:
  - name: compile_TG
    distros:
    - windows-64-vs2019-large
  - name: burn_in_tests_gen
  - name: buildscripts_test
  - name: noPassthrough_gen
  - name: server_discovery_and_monitoring_json_test_TG
  - name: dbtest_TG
    distros:
    - windows-64-vs2019-large
  - name: unittest_shell_hang_analyzer_gen
# Full enterprise Windows variant: builds the MSI, mongocryptd, and mh targets,
# runs the broad test matrix, and pushes release artifacts (push runs on rhel70-small).
- name: enterprise-windows
  display_name: "* Enterprise Windows"
  batchtime: 240 # 4 hours
  modules:
  - enterprise
  run_on:
  - windows-64-vs2019-small
  expansions:
    exe: ".exe"
    msi_target: msi
    additional_targets: archive-mongocryptd archive-mongocryptd-debug
    mh_target: archive-mh archive-mh-debug
    content_type: application/zip
    compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
    # We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
    # spawning a large number of linker processes.
    num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
    python: '/cygdrive/c/python/python37/python.exe'
    ext: zip
    scons_cache_scope: shared
    multiversion_platform: windows
    multiversion_edition: enterprise
    jstestfuzz_num_generated_files: 35
    target_resmoke_time: 20
    max_sub_suites: 100
    large_distro_name: windows-64-vs2019-large
    push_path: windows
    push_bucket: downloads.10gen.com
    push_name: windows
    push_arch: x86_64-enterprise
  tasks:
  - name: compile_all_run_unittests_TG
    distros:
    - windows-64-vs2019-large
  - name: audit
  - name: auth_audit_gen
  - name: buildscripts_test
  - name: causally_consistent_jscore_txns_passthrough
    distros:
    - windows-64-vs2019-large
  - name: .encrypt !.aggregation !.replica_sets !.sharding !.jscore
  - name: external_auth
  - name: external_auth_aws
  - name: external_auth_windows
    distros:
    - windows-64-2016
  - name: .jscore .common !.compat !.sharding
  - name: jsCore_auth
  - name: jsCore_ese
  - name: jsCore_txns_large_txns_format
  - name: .jstestfuzz .common
  - name: mqlrun
  - name: noPassthrough_gen
  - name: noPassthroughWithMongod_gen
  - name: .replica_sets .common
  - name: .replica_sets .multi_oplog
  - name: replica_sets_ese_gen
  - name: sasl
  - name: server_discovery_and_monitoring_json_test_TG
  - name: .sharding .txns
  - name: sharding_auth_gen
  - name: sharding_auth_audit_gen
  - name: sharding_ese_gen
  - name: snmp
  - name: unittest_shell_hang_analyzer_gen
  - name: push
    distros:
    - rhel70-small
- <<: *enterprise-windows-nopush-template
name: enterprise-windows-benchmarks
display_name: "~ Enterprise Windows (Benchmarks)"
tasks:
- name: compile_benchmarks
distros:
- windows-64-vs2019-large
- name: .benchmarks
- <<: *enterprise-windows-nopush-template
name: enterprise-windows-wtdevelop
display_name: "~ Enterprise Windows WiredTiger develop"
modules:
- enterprise
- wtdevelop
expansions:
<<: *enterprise-windows-nopush-expansions-template
use_wt_develop: true
- name: enterprise-windows-async
display_name: "~ Enterprise Windows async"
modules:
- enterprise
run_on:
- windows-64-vs2019-small
stepback: true
batchtime: 10080 # 7 days
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
msi_target: msi
exe: ".exe"
content_type: application/zip
compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
python: '/cygdrive/c/python/python37/python.exe'
ext: zip
scons_cache_scope: shared
test_flags: |- # Use the ServiceExecutorAdaptive with a reasonable number of starting threads
--serviceExecutor=adaptive \
--mongodSetParameters="adaptiveServiceExecutorReservedThreads: 8" \
--mongosSetParameters="adaptiveServiceExecutorReservedThreads: 8"
tasks:
- name: compile_TG
distros:
- windows-64-vs2019-large
- name: jsCore
- name: replica_sets
- name: sharding_gen
- name: enterprise-windows-inmem
display_name: Enterprise Windows (inMemory)
modules:
- enterprise
run_on:
- windows-64-vs2019-small
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
msi_target: msi
exe: ".exe"
content_type: application/zip
compile_flags: --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
python: '/cygdrive/c/python/python37/python.exe'
test_flags: --storageEngine=inMemory --excludeWithAnyTags=requires_persistence,requires_journaling
ext: zip
scons_cache_scope: shared
multiversion_platform: windows
multiversion_edition: enterprise
large_distro_name: windows-64-vs2019-large
tasks:
- name: compile_all_run_unittests_TG
distros:
- windows-64-vs2019-large
- name: .aggfuzzer .common
- name: audit
- name: auth_audit_gen
- name: causally_consistent_jscore_txns_passthrough
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- windows-64-vs2019-large
- name: initial_sync_fuzzer_gen
- name: .jscore .common !.decimal !.compat !.sharding
- name: .jstestfuzz .common !.flow_control # Flow control jstestfuzz take longer.
- name: .read_write_concern .linearize
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: replica_sets_multi_stmt_txn_jscore_passthrough
- name: sasl
- name: .sharding .txns
- name: sharding_auth_gen
- name: sharding_auth_audit_gen
- name: snmp
- name: .ssl
- name: windows
display_name: Windows
run_on:
- windows-64-vs2019-small
expansions:
msi_target: msi
exe: ".exe"
push_path: windows
push_bucket: downloads.mongodb.org
push_name: windows
push_arch: x86_64
multiversion_platform: windows_x86_64-2008plus-ssl
multiversion_platform_42_or_later: windows_x86_64-2012plus
multiversion_platform_44_or_later: windows
content_type: application/zip
compile_flags: --ssl MONGO_DISTMOD=windows -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
python: '/cygdrive/c/python/python37/python.exe'
ext: zip
scons_cache_scope: shared
large_distro_name: windows-64-vs2019-large
tasks:
- name: compile_all_run_unittests_TG
distros:
- windows-64-vs2019-large
- name: .aggfuzzer
- name: .aggregation !.auth !.encrypt !.unwind
- name: auth_gen
- name: .auth .gle
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
# Some concurrency workloads require a lot of memory, so we use machines
# with more RAM for these suites.
- name: .concurrency !.ubsan !.no_txns !.kill_terminate !.common !.debug_only
distros:
- windows-64-vs2019-large
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- windows-64-vs2019-large
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common !.auth
- name: jsonSchema
- name: .jstestfuzz !.initsync !.flow_control !.stepdowns
- name: multiversion_gen
- name: multiversion_auth_gen
- name: .powercycle
- name: .query_fuzzer
- name: .read_write_concern
- name: replica_sets
- name: replica_sets_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: .updatefuzzer
- name: push
distros:
- rhel70-small
- name: enterprise-windows-debug-unoptimized
display_name: Enterprise Windows DEBUG (Unoptimized)
modules:
- enterprise
run_on:
- windows-64-vs2019-small
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
exe: ".exe"
content_type: application/zip
compile_flags: --dbg=on --opt=off --ssl MONGO_DISTMOD=windows CPPPATH="c:/sasl/include c:/snmp/include" LIBPATH="c:/sasl/lib c:/snmp/lib" -j$(( $(grep -c ^processor /proc/cpuinfo) / 2 )) --win-version-min=win10
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
python: '/cygdrive/c/python/python37/python.exe'
ext: zip
scons_cache_scope: shared
tasks:
# This variant tests that unoptimized, DEBUG mongos and mongod binaries can run on Windows.
# It has a minimal amount of tasks because unoptimized builds are slow, which causes
# timing-sensitive tests to fail.
- name: compile_TG
distros:
- windows-64-vs2019-large
- name: audit
# Do not add more tasks to this list.
###########################################
# OSX buildvariants #
###########################################
- name: macos
display_name: macOS
run_on:
- macos-1014
expansions:
push_path: osx
push_bucket: downloads.mongodb.org
push_name: macos
push_arch: x86_64
compile_env: DEVELOPER_DIR=/Applications/Xcode10.2.app
compile_flags: --ssl -j$(sysctl -n hw.logicalcpu) --libc++ --variables-files=etc/scons/xcode_macosx.vars
resmoke_jobs_max: 6
tasks:
- name: compile_all_run_unittests_TG
- name: .aggregation !.auth !.encrypt !.unwind
- name: .auth .gle
- name: auth_gen
- name: .causally_consistent !.sharding
- name: .change_streams !.secondary_reads
- name: .misc_js
- name: .concurrency !.ubsan !.no_txns !.debug_only !.kill_terminate
- name: disk_wiredtiger
- name: free_monitoring
- name: initial_sync_fuzzer_gen
- name: .jscore .common !.auth
- name: .jstestfuzz .causal
- name: .jstestfuzz .interrupt
- name: .jstestfuzz .common
- name: .jstestfuzz .session
- name: .logical_session_cache .one_sec
- name: .query_fuzzer
- name: .read_write_concern !.linearize
- name: replica_sets
- name: replica_sets_kill_secondaries_jscore_passthrough
- name: .replica_sets .common !.auth
- name: retryable_writes_jscore_passthrough_gen
- name: .rollbackfuzzer
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .txns
- name: .ssl
- name: .stitch
- name: push
distros:
- rhel70-small
- name: macos-debug
display_name: "* macOS DEBUG"
batchtime: 60 # 1 hour
run_on:
- macos-1014
expansions:
resmoke_jobs_max: 6
compile_env: DEVELOPER_DIR=/Applications/Xcode10.2.app
compile_flags: --ssl --dbg=on --opt=on -j$(sysctl -n hw.logicalcpu) --libc++ --variables-files=etc/scons/xcode_macosx.vars
tasks:
- name: compile_all_run_unittests_TG
- name: aggregation
- name: auth_gen
- name: causally_consistent_jscore_txns_passthrough
- name: disk_wiredtiger
- name: failpoints
- name: .jscore .common !.auth !.sharding
- name: jsCore_txns_large_txns_format
- name: mongosTest
- name: replica_sets
- name: replica_sets_large_txns_format_gen
- name: .ssl
- name: .stitch
- name: enterprise-macos
display_name: Enterprise macOS
modules:
- enterprise
run_on:
- macos-1014
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: osx
push_bucket: downloads.10gen.com
push_name: macos
push_arch: x86_64-enterprise
mh_target: archive-mh archive-mh-debug
compile_env: DEVELOPER_DIR=/Applications/Xcode10.2.app
compile_flags: --ssl -j$(sysctl -n hw.logicalcpu) --libc++ --variables-files=etc/scons/xcode_macosx.vars
resmoke_jobs_max: 6
tasks:
- name: compile_all_run_unittests_TG
- name: audit
- name: auth_audit_gen
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.sharding !.aggregation !.jscore
- name: .jscore .common !.compat !.decimal !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: mqlrun
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: push
distros:
- rhel70-small
###########################################
# Embedded SDK buildvariants #
###########################################
- name: embedded-sdk-macos
display_name: "Embedded SDK - macOS"
run_on:
- macos-1014
batchtime: 10080 # 7 days
expansions:
test_flags: --excludeWithAnyTags=uses_transactions
cmake_path: /Applications/cmake-3.11.0-Darwin-x86_64/CMake.app/Contents/bin/cmake
compile_env: DEVELOPER_DIR=/Applications/Xcode10.2.app
compile_flags: >-
--lto
--variables-files=etc/scons/xcode_macosx.vars
-j$(sysctl -n hw.logicalcpu)
FRAMEWORKPATH='$BUILD_ROOT/mongo-embedded-sdk-$MONGO_VERSION/Frameworks'
cdriver_cmake_osx_deployment_target: "10.12"
cdriver_cmake_flags: >-
-DCMAKE_BUILD_TYPE=RelWithDebInfo
-DCMAKE_OSX_SYSROOT="$(xcrun --sdk macosx --show-sdk-path)"
-DCMAKE_OSX_ARCHITECTURES=x86_64
-DENABLE_APPLE_FRAMEWORK=ON
-DCMAKE_INSTALL_BINDIR=Frameworks
-DENABLE_SSL=DARWIN
-DENABLE_ZLIB=BUNDLED
-DCMAKE_C_FLAGS="-Wunguarded-availability"
disable_unit_tests: true
dump_scons_config_on_failure: true
tasks:
- name: embedded_sdk_build_and_test
- name: embedded-sdk-ubuntu-1604-x86_64
display_name: "Embedded SDK - Ubuntu 16.04 x86_64"
run_on:
- ubuntu1604-build
expansions:
test_flags: --excludeWithAnyTags=uses_transactions
    # NOTE(review): this comment used to justify passing --allocator=system
    # (the SERVER-27675 workaround), but that flag is no longer present in
    # compile_flags below — confirm whether the workaround is still needed.
compile_flags: >-
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
-j$(grep -c ^processor /proc/cpuinfo)
LIBPATH="\$BUILD_ROOT/mongo-embedded-sdk-\$MONGO_VERSION/lib"
cdriver_cmake_flags: >-
-DCMAKE_BUILD_TYPE=RelWithDebInfo
-DCMAKE_C_COMPILER=/opt/mongodbtoolchain/v3/bin/gcc
-DCMAKE_CXX_COMPILER=/opt/mongodbtoolchain/v3/bin/g++
-DCMAKE_C_FLAGS="-flto"
-DCMAKE_INSTALL_RPATH=\$ORIGIN/../lib
disable_unit_tests: true
dump_scons_config_on_failure: true
tasks:
- name: embedded_sdk_build_and_test
###########################################
# Redhat buildvariants #
###########################################
- name: enterprise-rhel-62-64-bit
display_name: "! Enterprise RHEL 6.2"
batchtime: 60 # 1 hour
modules:
- enterprise
run_on:
- rhel62-small
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-rhel62
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel62
repo_edition: enterprise
scons_cache_scope: shared
jstestfuzz_num_generated_files: 40
jstestfuzz_concurrent_num_files: 10
target_resmoke_time: 10
max_sub_suites: 100
large_distro_name: rhel62-large
burn_in_tag_buildvariants: enterprise-rhel-62-64-bit-majority-read-concern-off enterprise-rhel-62-64-bit-inmem linux-64-duroff enterprise-rhel-62-64-bit-multiversion
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: lint_pylinters
- name: lint_clang_format
- name: lint_eslint
- name: lint_cpplint
- name: lint_fuzzer_sanity_patch
- name: lint_yaml
- name: lint_errorcodes
- name: burn_in_tests_gen
- name: burn_in_tests_multiversion_gen
- name: .aggfuzzer
- name: .aggregation
- name: audit
- name: .auth
- name: burn_in_tags_gen
- name: buildscripts_test
- name: unittest_shell_hang_analyzer_gen
- name: .causally_consistent !.sharding
- name: .change_streams
- name: .misc_js
- name: .concurrency !.large !.ubsan !.no_txns !.debug_only
- name: .concurrency .large !.ubsan !.no_txns !.debug_only
distros:
- rhel62-large
- name: disk_wiredtiger
- name: .encrypt
- name: idl_tests
- name: initial_sync_fuzzer_gen
- name: .integration
distros:
- rhel62-large
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: jsonSchema
- name: .jstestfuzz !.flow_control # Flow control jstestfuzz take longer.
- name: libunwind_tests
- name: multiversion_sanity_check_gen
- name: mqlrun
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- name: multiversion_gen
- name: .query_fuzzer
- name: .random_multiversion_replica_sets
- name: .read_write_concern .large
distros:
- rhel62-large
- name: .read_write_concern !.large
- name: .replica_sets !.encrypt !.auth
distros:
- rhel62-large
- name: replica_sets_reconfig_jscore_passthrough
- name: retryable_writes_jscore_passthrough_gen
- name: retryable_writes_jscore_stepdown_passthrough
distros:
- rhel62-large
- name: .read_only
- name: .rollbackfuzzer
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .txns
- name: .sharding .common
- name: snmp
- name: .stitch
- name: .updatefuzzer
- name: secondary_reads_passthrough_gen
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: server_discovery_and_monitoring_json_test_TG
- name: enterprise-rhel-62-64-bit-large-txns-format
display_name: "Enterprise RHEL 6.2 (large transactions format)"
modules:
- enterprise
run_on:
- rhel62-small
batchtime: 10080 # 7 days
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
compile_flags: >-
--ssl
MONGO_DISTMOD=rhel62
-j$(grep -c ^processor /proc/cpuinfo)
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
test_flags: >-
--mongodSetParameters="{maxNumberOfTransactionOperationsInSingleOplogEntry: 2}"
--excludeWithAnyTags=exclude_from_large_txns
tasks:
- name: compile_TG
distros:
- rhel62-large
- name: auth_gen
- name: auth_audit_gen
- name: causally_consistent_jscore_txns_passthrough
- name: change_streams
- name: change_streams_whole_db_passthrough
- name: change_streams_whole_cluster_passthrough
- name: concurrency_replication
- name: concurrency_replication_multi_stmt_txn
- name: concurrency_sharded_replication
- name: concurrency_sharded_replication_with_balancer
- name: concurrency_sharded_clusterwide_ops_add_remove_shards
- name: concurrency_sharded_local_read_write_multi_stmt_txn
- name: concurrency_sharded_local_read_write_multi_stmt_txn_with_balancer
- name: concurrency_sharded_multi_stmt_txn
- name: concurrency_sharded_multi_stmt_txn_with_balancer
- name: concurrency_sharded_multi_stmt_txn_with_stepdowns
- name: concurrency_sharded_with_stepdowns
- name: concurrency_sharded_with_stepdowns_and_balancer
- name: initial_sync_fuzzer_gen
- name: jsCore
- name: jsCore_txns
- name: .logical_session_cache .repl
- name: .multi_shard
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- name: multiversion_auth_gen
- name: multiversion_gen
- name: noPassthrough_gen
- name: .replica_sets !.multi_oplog !.large
- name: .replica_sets !.multi_oplog .large
distros:
- rhel62-large
- name: .rollbackfuzzer
- name: .sharding .txns
- name: sharding_gen
- name: sharding_auth_gen
- name: sharding_auth_audit_gen
- name: sharding_ese_gen
- name: sharding_ese_gcm_gen
- name: sharding_csrs_continuous_config_stepdown_gen
- name: sharded_multi_stmt_txn_jscore_passthrough
distros:
- rhel62-large
- name: enterprise-rhel-62-64-bit-majority-read-concern-off
display_name: "Enterprise RHEL 6.2 (majority read concern off)"
modules:
- enterprise
run_on:
- rhel62-small
expansions: &enterprise-rhel-62-64-bit-majority-read-concern-off-expansions
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# Ban tests that run prepareTransaction or multi-shard transactions (which use
# prepareTransaction). prepareTransaction is rejected on nodes with
# enableMajorityReadConcern:"false" (SERVER-37559).
test_flags: >-
--majorityReadConcern=off
--excludeWithAnyTags=requires_majority_read_concern,uses_prepare_transaction,uses_multi_shard_transaction,uses_atclustertime
compile_flags: >-
-j$(grep -c ^processor /proc/cpuinfo)
--ssl
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
MONGO_DISTMOD=rhel62
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: .aggfuzzer !.wildcard
- name: .aggregation !.read_write_concern
- name: audit
- name: .auth !.multiversion
- name: buildscripts_test
- name: .causally_consistent !.txns
- name: causally_consistent_jscore_txns_passthrough
- name: .change_streams
- name: .misc_js !.non_read_maj
- name: .concurrency .common !.read_concern_maj
- name: .concurrency .no_txns
- name: concurrency_sharded_causal_consistency_gen
- name: concurrency_sharded_causal_consistency_and_balancer
- name: concurrency_replication_causal_consistency
distros:
- rhel62-large
- name: disk_wiredtiger
- name: .encrypt
- name: .integration
distros:
- rhel62-large
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsonSchema
- name: .jstestfuzz !.flow_control
- name: .logical_session_cache .repl
- name: multiversion_gen
- name: .random_multiversion_replica_sets
- name: .read_write_concern .large
distros:
- rhel62-large
- name: .replica_sets !.auth !.encrypt !.non_maj_read
distros:
- rhel62-large
- name: retryable_writes_jscore_passthrough_gen
- name: .read_only
- name: .rollbackfuzzer
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
# Sharded transactions suites exclude tests that use snapshot read concern, since snapshot read
# concern uses 'atClusterTime' in sharded clusters, and this is not supported with
# enableMajorityReadConcern=false.
- name: .sharding .jscore !.txns
- name: .sharding .common
- name: snmp
- name: .updatefuzzer
# This build variant is used to run multiversion tests as part of burn_in_tags as these tests are
# currently only run on our daily builders.
- name: enterprise-rhel-62-64-bit-multiversion
display_name: "Enterprise RHEL 6.2 (implicit multiversion)"
modules:
- enterprise
run_on:
- rhel62-small
expansions: &enterprise-rhel-62-64-bit-multiversion
test_flags: >-
--excludeWithAnyTags=requires_fcv_44,multiversion_incompatible
compile_flags: >-
-j$(grep -c ^processor /proc/cpuinfo)
--ssl
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
MONGO_DISTMOD=rhel62
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
tooltags: "ssl sasl gssapi"
build_mongoreplay: true
large_distro_name: rhel62-large
resmoke_jobs_factor: 0.25
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .random_multiversion_replica_sets
- name: enterprise-rhel-62-64-bit-flow-control-off
display_name: "Enterprise RHEL 6.2 (flow control off)"
modules:
- enterprise
run_on:
- rhel62-small
batchtime: 10080 # 7 days
expansions: &enterprise-rhel-62-64-bit-flow-control-off
additional_targets: archive-mongocryptd archive-mongocryptd-debug
test_flags: >-
--flowControl=off
--excludeWithAnyTags=requires_flow_control
compile_flags: >-
-j$(grep -c ^processor /proc/cpuinfo)
--ssl
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
MONGO_DISTMOD=rhel62
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
tasks:
- name: compile_TG
distros:
- rhel62-large
- name: dbtest_TG
distros:
- rhel62-large
- name: .aggfuzzer
- name: .aggregation
- name: audit
- name: .auth
- name: .causally_consistent !.wo_snapshot
- name: .change_streams
- name: .misc_js
- name: .concurrency !.ubsan !.no_txns !.debug_only !.large
- name: .concurrency !.ubsan !.no_txns !.debug_only .large
distros:
- rhel62-large
- name: disk_wiredtiger
- name: .encrypt
- name: initial_sync_fuzzer_gen
- name: .integration
distros:
- rhel62-large
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: jsonSchema
- name: .jstestfuzz !.flow_control
- name: .logical_session_cache
- name: .multi_shard .common
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- name: multi_shard_multi_stmt_txn_kill_primary_jscore_passthrough_gen
- name: multiversion_gen
- name: .read_write_concern !.aggregation
distros:
- rhel62-large
- name: .replica_sets !.encrypt !.auth
distros:
- rhel62-large
- name: retryable_writes_jscore_passthrough_gen
- name: .read_only
- name: .retry
- name: .rollbackfuzzer
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot
- name: .sharding .common
- name: snmp
- name: .updatefuzzer
- name: enterprise-rhel-62-64-bit-single-phase-index-builds
display_name: "Enterprise RHEL 6.2 (single phase index builds)"
modules:
- enterprise
run_on:
- rhel62-small
expansions: &enterprise-rhel-62-64-bit-single-phase-index-builds
additional_targets: archive-mongocryptd archive-mongocryptd-debug
test_flags: >-
--mongodSetParameters="{enableTwoPhaseIndexBuild: false}"
compile_flags: >-
-j$(grep -c ^processor /proc/cpuinfo)
--ssl
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
MONGO_DISTMOD=rhel62
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
tasks:
- name: compile_TG
distros:
- rhel62-large
- name: .concurrency !.large !.ubsan !.no_txns !.debug_only
- name: .jscore .common
- name: noPassthrough_gen
- name: noPassthroughWithMongod_gen
- name: .replica_sets !.encrypt !.auth
distros:
- rhel62-large
- name: .rollbackfuzzer
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: enterprise-rhel-62-64-bit-two-phase-index-build-commit-quorum-off
display_name: "Enterprise RHEL 6.2 (2 phase index build commit quorum off)"
modules:
- enterprise
run_on:
- rhel62-small
batchtime: 10080 # 7 days
expansions: &enterprise-rhel-62-64-bit-two-phase-index-build-commit-quorum-off
test_flags: >-
--mongodSetParameters="{enableIndexBuildCommitQuorum: false}"
compile_flags: >-
-j$(grep -c ^processor /proc/cpuinfo)
--ssl
--variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
MONGO_DISTMOD=rhel62
multiversion_platform: rhel62
multiversion_edition: enterprise
repo_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
tasks:
- name: compile_TG
distros:
- rhel62-large
- name: .concurrency !.large !.ubsan !.no_txns !.debug_only
- name: .jscore .common
- name: noPassthrough_gen
- name: noPassthroughWithMongod_gen
- name: .replica_sets !.encrypt !.auth
distros:
- rhel62-large
- name: .rollbackfuzzer
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: enterprise-rhel-62-64-bit-coverage
display_name: "~ Enterprise RHEL 6.2 DEBUG Code Coverage"
modules:
- enterprise
run_on:
- rhel62-large
batchtime: 10080 # 7 days
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
compile_flags: --dbg=on --gcov --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: enterprise
resmoke_jobs_factor: 0.5 # Avoid starting too many mongod's
# The gcov instrumentation saves the path the .gcno files were created in as the default path
# for the .gcda files. In Evergreen the path will start with /data/mci/[Hashed ID]/src/... where
# the hashed ID is unique per task run. GCOV_PREFIX_STRIP is the number of directory levels to
# strip from the top of the default path before appending to the GCOV_PREFIX (if any).
gcov_environment: GCOV_PREFIX=$(pwd) GCOV_PREFIX_STRIP=4
# Mixing --cache and --gcov doesn't work correctly yet. See SERVER-11084
    timeout_secs: 10800 # 3 hours
use_scons_cache: false
tasks:
- name: compile_all_run_unittests_TG
- name: .aggregation !.unwind
- name: audit
- name: .auth
- name: causally_consistent_jscore_txns_passthrough
- name: .change_streams
- name: .misc_js
- name: .concurrency !.ubsan !.no_txns !.stepdowns !.kill_terminate
- name: disk_wiredtiger
- name: .encrypt
- name: initial_sync_fuzzer_gen
- name: .integration !.audit
- name: .jscore .common
- name: jsCore_txns_large_txns_format
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: libunwind_tests
- name: .logical_session_cache .one_sec
- name: .multi_shard .common
- name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .query_fuzzer
- name: .random_multiversion_replica_sets
- name: .read_write_concern
- name: .replica_sets
- name: .read_only
- name: .rollbackfuzzer
- name: retryable_writes_jscore_passthrough_gen
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot
- name: .sharding .common
- name: snmp
- name: .updatefuzzer
- &enterprise-rhel-70-64-bit-template
name: enterprise-rhel-70-64-bit
display_name: "* Enterprise RHEL 7.0"
batchtime: 240 # 4 hours
modules:
- enterprise
run_on:
- rhel70-small
expansions: &enterprise-rhel-70-64-bit-expansions-template
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-rhel70
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel70
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel70
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel70
- name: .aggfuzzer
- name: audit
- name: auth_audit_gen
- name: auth_gen
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.sharding !.replica_sets !.aggregation !.jscore
- name: external_auth
- name: external_auth_aws
- name: .jscore .common !.compat !.decimal !.sharding
- name: jsCore_txns_large_txns_format
- name: .jstestfuzz .common
- name: libunwind_tests
- name: .logical_session_cache .one_sec
- name: .ocsp
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: .replica_sets .multi_oplog
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: sharding_auth_audit_gen
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
distros:
- rhel70
- name: ubi7
display_name: "UBI 7"
run_on:
- ubi7
expansions:
resmoke_jobs_factor: 1
disable_shared_scons_cache: true
    # NOTE(review): MONGO_DISTMOD=ubuntu1604 looks copy-pasted — this variant
    # runs on UBI 7, which is RHEL-family; confirm the intended distmod
    # (likely rhel70) before relying on these packages.
    compile_flags: MONGO_DISTMOD=ubuntu1604 --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
tooltags: ""
build_mongoreplay: true
test_flags: >-
--excludeWithAnyTags=requires_os_access
tasks:
- name: compile_TG
distros:
- rhel70-large
- name: jsCore
- name: sharding_gen
- name: replica_sets
- &enterprise-rhel-80-64-bit-template
name: enterprise-rhel-80-64-bit
display_name: "Enterprise RHEL 8.0"
modules:
- enterprise
run_on:
- rhel80-build
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-rhel80
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=rhel80 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel80
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel80
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel80-build
- name: .aggfuzzer
- name: audit
- name: auth_audit_gen
- name: auth_gen
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.sharding !.replica_sets !.aggregation !.jscore
- name: external_auth
- name: external_auth_aws
- name: .jscore .common !.compat !.decimal !.sharding
- name: jsCore_txns_large_txns_format
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: libunwind_tests
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: .replica_sets .multi_oplog
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: sharding_auth_audit_gen
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
# This variant intentionally tests uncommon features nightly
- <<: *enterprise-rhel-70-64-bit-template
name: enterprise-rhel-70-64-bit-kitchen-sink
display_name: "~ Enterprise RHEL 7.0 (Dagger)"
batchtime: 1440 # 1 day
expansions:
<<: *enterprise-rhel-70-64-bit-expansions-template
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
additional_targets: dagger # If this moves to another variant, update the compile_all task
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel70
- name: jsCore
- <<: *enterprise-rhel-70-64-bit-template
name: hot_backups-rhel-70-64-bit
display_name: "hot_backups RHEL 7.0"
batchtime: 1440 # 1 day
run_on:
- rhel70
expansions:
<<: *enterprise-rhel-70-64-bit-expansions-template
additional_targets: ""
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --enterprise-features=hot_backups
has_packages: false
mh_target: ""
tasks:
- name: compile_all_run_unittests_TG
- name: jsCore
- name: noPassthrough_gen
- <<: *enterprise-rhel-70-64-bit-template
name: enterprise-rhel-70-64-bit-no-libunwind
display_name: "~ Enterprise RHEL 7.0 (no-libunwind)"
batchtime: 10080 # 1 week
run_on:
- rhel70
expansions:
<<: *enterprise-rhel-70-64-bit-expansions-template
compile_flags: --ssl MONGO_DISTMOD=rhel70 --use-libunwind=off --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
has_packages: false
mh_target: ""
# Override list of tasks to exclude package testing and publishing
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel70
- name: .aggfuzzer
- name: audit
- name: auth_audit_gen
- name: auth_gen
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.sharding !.replica_sets !.aggregation !.jscore
- name: external_auth
- name: external_auth_aws
- name: .jscore .common !.compat !.decimal !.sharding
- name: jsCore_txns_large_txns_format
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: .replica_sets .multi_oplog
- name: sasl
- name: search
- name: search_auth
- name: search_ssl
- name: sharding_auth_audit_gen
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: ubuntu1604-debug
display_name: "* Ubuntu 16.04 DEBUG"
batchtime: 240 # 4 hours
run_on:
- ubuntu1604-test
expansions:
resmoke_jobs_factor: 0.5 # Avoid starting too many mongods
compile_flags: MONGO_DISTMOD=ubuntu1604 --dbg=on --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
scons_cache_scope: shared
tasks:
- name: compile_TG
distros:
- ubuntu1604-build
- name: jsCore
- name: .read_write_concern !.write !.aggregation
- name: replica_sets_jscore_passthrough
- name: replica_sets_large_txns_format_jscore_passthrough
- name: sharded_collections_jscore_passthrough
- name: sharding_gen
- name: sharding_auth_gen
- name: .stitch
- name: ubuntu1604-container
display_name: "Ubuntu 16.04 Container"
run_on:
- ubuntu1604-container-server
expansions:
resmoke_jobs_factor: 1
disable_shared_scons_cache: true
compile_flags: MONGO_DISTMOD=ubuntu1604 --opt=on -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
tooltags: ""
build_mongoreplay: true
test_flags: >-
--excludeWithAnyTags=requires_os_access
tasks:
- name: compile_TG
distros:
- ubuntu1604-build
- name: jsCore
- name: sharding_gen
- name: replica_sets
- name: rhel62
display_name: RHEL 6.2
run_on:
- rhel62-small
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-rhel62
compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: rhel62
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: .aggfuzzer .common
- name: aggregation
- name: .auth !.multiversion !.audit
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- rhel62-large
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: rhel70
display_name: RHEL 7.0
run_on:
- rhel70-small
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-rhel70
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel70
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: rhel70
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel70
- name: aggregation
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
distros:
- rhel70
- name: concurrency_replication_causal_consistency
distros:
- rhel70
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
distros:
- rhel70
- name: rhel80
display_name: RHEL 8.0
run_on:
- rhel80-build
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-rhel80
compile_flags: --ssl MONGO_DISTMOD=rhel80 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel80
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: rhel80
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel80-build
- name: aggregation
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
distros:
- rhel80-build
- name: concurrency_replication_causal_consistency
distros:
- rhel80-build
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
# This variant compiles on RHEL 7.0 and runs tests on RHEL 7.6
- name: rhel76_compile_rhel70
display_name: RHEL 7.0/7.6 Cross-ABI
run_on:
- rhel76-test
expansions:
compile_flags: --ssl MONGO_DISTMOD=rhel70 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
tasks:
- name: compile_TG
distros:
- rhel70
- name: ssl_gen
- name: jsCore
- name: external_auth
- name: enterprise-rhel-71-ppc64le
display_name: Enterprise RHEL 7.1 PPC64LE
modules:
- enterprise
run_on:
- rhel71-power8-test
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# SMT8 makes the reported CPU count very high, so we compensate by lowering the parallelism
compile_flags: --ssl MONGO_DISTMOD=rhel71 -j$(echo "$(grep -c processor /proc/cpuinfo)/2" | bc) CCFLAGS="-mcpu=power8 -mtune=power8 -mcmodel=medium" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
resmoke_jobs_factor: 0.25
has_packages: true
packager_script: packager_enterprise.py
packager_arch: ppc64le
packager_distro: rhel71
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: ppc64le-enterprise-rhel71
repo_edition: enterprise
multiversion_platform: rhel71
multiversion_architecture: ppc64le
multiversion_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel71-power8-build
- name: .aggregation .common
- name: audit
- name: .auth !.multiversion !.jscore
- name: .misc_js
- name: .encrypt
- name: .integration !.audit
distros:
- rhel71-power8-build
- name: .jscore .common !.auth
- name: jsCore_op_query
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: sasl
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- name: .publish
distros:
- rhel70
- name: enterprise-rhel-72-s390x
display_name: Enterprise RHEL 7.2 s390x
modules:
- enterprise
run_on:
- rhel72-zseries-test
batchtime: 10080 # 7 days
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
release_build: true
compile_flags: --ssl MONGO_DISTMOD=rhel72 -j3 CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
resmoke_jobs_max: 2
has_packages: true
packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: rhel72
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: s390x-enterprise-rhel72
repo_edition: enterprise
multiversion_platform: rhel72
multiversion_architecture: s390x
multiversion_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel72-zseries-build
- name: .aggregation .common
- name: audit
- name: .auth !.multiversion !.jscore
- name: .misc_js
- name: .encrypt
- name: .integration !.audit
distros:
- rhel72-zseries-build
- name: .jscore .common !.auth
- name: jsCore_op_query
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: sasl
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- name: .publish
distros:
- rhel70
- name: enterprise-rhel-67-s390x
display_name: Enterprise RHEL 6.7 s390x
modules:
- enterprise
run_on:
- rhel67-zseries-test
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
compile_flags: --ssl MONGO_DISTMOD=rhel67 -j3 CCFLAGS="-march=z9-109 -mtune=z10" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --use-hardware-crc32=off
has_packages: true
packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: rhel67
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: s390x-enterprise-rhel67
repo_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel67-zseries-build
- name: .aggregation .common
- name: audit
- name: .auth !.multiversion
- name: .misc_js
- name: .encrypt
- name: .integration !.audit !.sharded
distros:
- rhel67-zseries-build
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: replica_sets_jscore_passthrough
- name: sasl
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- name: secondary_reads_passthrough_gen
- name: .publish
distros:
- rhel62-large
- name: rhel-72-s390x
display_name: RHEL 7.2 s390x
run_on:
- rhel72-zseries-test
expansions:
compile_flags: --ssl MONGO_DISTMOD=rhel72 -j3 CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
has_packages: true
packager_script: packager.py
packager_arch: s390x
packager_distro: rhel72
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: s390x-rhel72
repo_edition: org
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel72-zseries-build
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- name: .publish
distros:
- rhel70
- name: rhel-67-s390x
display_name: RHEL 6.7 s390x
run_on:
- rhel67-zseries-test
expansions:
compile_flags: --ssl MONGO_DISTMOD=rhel67 -j3 CCFLAGS="-march=z9-109 -mtune=z10" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --use-hardware-crc32=off
has_packages: true
packager_script: packager.py
packager_arch: s390x
packager_distro: rhel67
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: s390x-rhel67
repo_edition: org
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel67-zseries-build
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- name: .publish
distros:
- rhel62-large
###########################################
# Ubuntu buildvariants #
###########################################
- name: enterprise-ubuntu1604-64
display_name: Enterprise Ubuntu 16.04
modules:
- enterprise
run_on:
- ubuntu1604-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
lang_environment: LANG=C
push_arch: x86_64-enterprise-ubuntu1604
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: ubuntu1604
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: ubuntu1604
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: .aggfuzzer .common
- name: audit
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.aggregation !.sharding !.jscore
- name: .jepsen
distros:
- ubuntu1604-build
- name: .jscore .common !.compat !.decimal !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: .watchdog
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: enterprise-ubuntu-dynamic-1604-clang
display_name: "! Shared Library Enterprise Ubuntu 16.04 (Clang)"
batchtime: 240 # 4 hours
modules:
- enterprise
run_on:
- ubuntu1604-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
lang_environment: LANG=C
compile_flags: --link-model=dynamic -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars
scons_cache_scope: shared
scons_cache_mode: all
multiversion_platform: ubuntu1604
multiversion_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: buildscripts_test
- name: server_discovery_and_monitoring_json_test_TG
- name: enterprise-ubuntu-dynamic-1604-clang-tidy
display_name: "* Enterprise Clang Tidy"
batchtime: 240 # 4 hours
modules:
- enterprise
run_on:
- ubuntu1604-build
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
lang_environment: LANG=C
compile_flags: --link-model=dynamic -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars
scons_cache_scope: shared
scons_cache_mode: all
tasks:
- name: clang_tidy_TG
- name: ubuntu-dynamic-1604-clang
display_name: "* Shared Library Ubuntu 16.04 (Clang)"
batchtime: 240 # 4 hours
run_on:
- ubuntu1604-test
expansions:
lang_environment: LANG=C
compile_flags: --link-model=dynamic -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars
scons_cache_scope: shared
scons_cache_mode: all
multiversion_platform: ubuntu1604
# NOTE(review): this community (non-enterprise) variant sets multiversion_edition
# to "enterprise", likely copied from the enterprise sibling variant — confirm intent.
multiversion_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: buildscripts_test
###########################################
# SUSE buildvariants #
###########################################
- name: enterprise-suse12-64
display_name: Enterprise SLES 12
modules:
- enterprise
run_on:
- suse12-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-suse12
compile_flags: --ssl MONGO_DISTMOD=suse12 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: suse12
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: suse12
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse12-build
- name: .aggfuzzer .common
- name: audit
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.aggregation !.sharding !.jscore
- name: .jscore .common !.compat !.decimal !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: enterprise-suse12-s390x
display_name: Enterprise SLES 12 s390x
modules:
- enterprise
run_on:
- suse12-zseries-test
batchtime: 10080 # 7 days
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: s390x-enterprise-suse12
compile_flags: --ssl MONGO_DISTMOD=suse12 -j3 CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
has_packages: true
packager_script: packager_enterprise.py
packager_arch: s390x
packager_distro: suse12
repo_edition: enterprise
multiversion_platform: suse12
multiversion_architecture: s390x
multiversion_edition: enterprise
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse12-zseries-build
- name: .aggregation .common
- name: audit
- name: .auth !.multiversion !.jscore
- name: .misc_js
- name: .encrypt
- name: .integration !.audit
distros:
- suse12-zseries-build
- name: .jscore .common !.auth
- name: jsCore_op_query
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: sasl
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion
- name: snmp
- name: .stitch
- name: secondary_reads_passthrough_gen
- name: .publish
distros:
- suse12-test
- name: suse12-s390x
display_name: SLES 12 s390x
run_on:
- suse12-zseries-test
batchtime: 10080 # 7 days
stepback: false
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: s390x-suse12
compile_flags: --ssl MONGO_DISTMOD=suse12 -j3 CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
has_packages: true
packager_script: packager.py
packager_arch: s390x
packager_distro: suse12
repo_edition: org
multiversion_platform: suse12
multiversion_architecture: s390x
multiversion_edition: targeted
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse12-zseries-build
- name: jsCore
- name: replica_sets_jscore_passthrough
- name: ssl_gen
- name: .publish
distros:
- suse12-test
- name: suse12
display_name: SUSE 12
run_on:
- suse12-test
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-suse12
compile_flags: --ssl MONGO_DISTMOD=suse12 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: suse12
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: suse12
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse12-build
- name: .aggfuzzer .common
- name: aggregation
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- suse12-build
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: multiversion_gen
- name: .logical_session_cache .one_sec
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: .publish
- name: enterprise-suse15-64
display_name: Enterprise SLES 15
modules:
- enterprise
run_on:
- suse15-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-suse15
compile_flags: --ssl MONGO_DISTMOD=suse15 --use-libunwind=off -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: suse15
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse15-build
- name: .aggfuzzer .common !.multiversion
- name: audit
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.aggregation !.sharding !.jscore
- name: .jscore .common !.decimal !.compat !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: .publish
- name: suse15
display_name: SUSE 15
run_on:
- suse15-test
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-suse15
compile_flags: --ssl MONGO_DISTMOD=suse15 --use-libunwind=off -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: suse15
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: suse15
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- suse15-build
- name: .aggfuzzer .common !.multiversion
- name: aggregation
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- suse15-build
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .txns
- name: .sharding .common !.op_query !.csrs !.multiversion
- name: .ssl
- name: .stitch
- name: .publish
###########################################
# Debian buildvariants #
###########################################
- name: enterprise-debian92-64
display_name: Enterprise Debian 9.2
modules:
- enterprise
run_on:
- debian92-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-debian92
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=debian92 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: debian92
multiversion_edition: enterprise
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: debian92
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- debian92-build
- name: .aggfuzzer .common
- name: audit
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.aggregation !.sharding !.jscore
- name: .jscore .common !.decimal !.compat !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: debian92
display_name: Debian 9.2
run_on:
- debian92-test
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-debian92
compile_flags: --ssl MONGO_DISTMOD=debian92 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
multiversion_platform: debian92
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: debian92
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- debian92-build
- name: .aggfuzzer .common
- name: aggregation
- name: aggregation_auth
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- debian92-build
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: enterprise-debian10-64
display_name: Enterprise Debian 10
modules:
- enterprise
run_on:
- debian10-test
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
push_path: linux
push_bucket: downloads.10gen.com
push_name: linux
push_arch: x86_64-enterprise-debian10
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=debian10 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: debian10
multiversion_edition: enterprise
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: debian10
repo_edition: enterprise
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- debian10-build
- name: .aggfuzzer .common
- name: audit
- name: causally_consistent_jscore_txns_passthrough
- name: .encrypt !.replica_sets !.aggregation !.sharding !.jscore
- name: .jscore .common !.decimal !.compat !.sharding
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: replica_sets_auth_gen
- name: replica_sets_jscore_passthrough
- name: sasl
- name: sharding_auth_gen
- name: snmp
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
- name: debian10
display_name: Debian 10
run_on:
- debian10-test
expansions:
push_path: linux
push_bucket: downloads.mongodb.org
push_name: linux
push_arch: x86_64-debian10
compile_flags: --ssl MONGO_DISTMOD=debian10 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
# We invoke SCons using --jobs = (# of CPUs / 4) to avoid causing out of memory errors due to
# spawning a large number of linker processes.
num_scons_link_jobs_available: $(( $(grep -c ^processor /proc/cpuinfo) / 4 ))
multiversion_platform: debian10
multiversion_edition: targeted
has_packages: true
packager_script: packager.py
packager_arch: x86_64
packager_distro: debian10
repo_edition: org
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- debian10-build
- name: .aggfuzzer .common
- name: aggregation
- name: aggregation_auth
- name: .auth !.audit !.multiversion
- name: causally_consistent_jscore_txns_passthrough
- name: .misc_js
- name: .concurrency .common
- name: concurrency_replication_causal_consistency
distros:
- debian10-build
- name: disk_wiredtiger
- name: free_monitoring
- name: .jscore .common !.decimal
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: replica_sets
- name: .replica_sets .common
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.op_query !.csrs
- name: .ssl
- name: .stitch
- name: test_packages
distros:
- ubuntu1604-packer
- name: .publish
################################
# storage engine buildvariants #
################################
- name: rhel-62-64-bit-biggie
display_name: RHEL 6.2 (Biggie)
run_on:
- rhel62-small
expansions:
test_flags: --storageEngine=biggie --excludeWithAnyTags=SERVER-38379,requires_persistence,requires_journaling,uses_transactions,requires_wiredtiger,requires_snapshot_read
compile_flags: MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --dbg=off --opt=on --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
num_jobs_available: $(grep -c ^processor /proc/cpuinfo)
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: jsCore
- name: enterprise-rhel-62-benchmarks
display_name: Enterprise RHEL 6.2 (Benchmarks)
modules:
- enterprise
run_on:
- centos6-perf
expansions:
compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
tasks:
- name: compile_benchmarks
distros:
- rhel62-large
- name: .benchmarks
distros:
- centos6-perf
- name: enterprise-rhel-62-64-bit-inmem
display_name: Enterprise RHEL 6.2 (inMemory)
modules:
- enterprise
run_on:
- rhel62-small
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
test_flags: --storageEngine=inMemory --excludeWithAnyTags=requires_persistence,requires_journaling
compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: enterprise
scons_cache_scope: shared
large_distro_name: rhel62-large
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: .aggfuzzer .common
- name: .aggregation !.unwind !.encrypt
- name: audit
- name: .auth !.multiversion
- name: .causally_consistent !.wo_snapshot
- name: .change_streams !.secondary_reads
- name: .misc_js
- name: .concurrency !.ubsan !.no_txns !.debug_only !.kill_terminate
distros:
- rhel62-large # Some workloads require a lot of memory, use a bigger machine for this suite.
- name: initial_sync_fuzzer_gen
- name: .integration !.audit
distros:
- rhel62-large
- name: .jscore .common !.decimal
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: .jstestfuzz !.initsync
- name: .logical_session_cache
- name: .multi_shard .common
- name: multi_stmt_txn_jscore_passthrough_with_migration_gen
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: .replica_sets .multi_oplog
- name: replica_sets_multi_stmt_txn_jscore_passthrough
- name: replica_sets_multi_stmt_txn_stepdown_jscore_passthrough_gen
distros:
- rhel62-large
- name: .retry
- name: rollback_fuzzer_gen
- name: sasl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
- name: sharded_multi_stmt_txn_jscore_passthrough
distros:
- rhel62-large
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion !.csrs
- name: snmp
- name: .ssl
- name: .updatefuzzer
- name: linux-64-ephemeralForTest
display_name: Linux (ephemeralForTest)
run_on:
- rhel62-small
expansions:
# Transactions are not explicitly supported on the ephemeralForTest storage engine.
# Speculative majority reads are currently only allowed for change streams, which are only supported on WiredTiger.
test_flags: --storageEngine=ephemeralForTest --excludeWithAnyTags=requires_persistence,requires_fsync,SERVER-21420,SERVER-21658,requires_journaling,requires_wiredtiger,uses_transactions,requires_document_locking,uses_speculative_majority,requires_snapshot_read,requires_majority_read_concern,uses_change_streams,requires_sharding
compile_flags: -j$(grep -c ^processor /proc/cpuinfo) --dbg=off --opt=on --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: targeted
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel62-large
- name: .aggfuzzer .common
- name: aggregation
- name: .auth !.multiversion !.audit !.sharding
- name: .misc_js
- name: concurrency
distros:
- rhel62-large # Some workloads require a lot of memory, use a bigger machine for this suite.
- name: concurrency_replication
- name: concurrency_replication_causal_consistency
distros:
- rhel62-large
- name: concurrency_simultaneous
- name: concurrency_simultaneous_replication
distros:
- rhel62-large
- name: .integration !.audit
distros:
- rhel62-large
- name: .jscore .common !.txns !.decimal
- name: jsCore_op_query
- name: .jstestfuzz .common
- name: .logical_session_cache .one_sec
- name: .read_write_concern .linearize
- name: replica_sets
- name: .replica_sets .common
- name: rollback_fuzzer_gen
- name: .updatefuzzer
- name: enterprise-rhel-71-ppc64le-inmem
display_name: Enterprise RHEL 7.1 PPC64LE (inMemory) DEBUG
modules:
- enterprise
run_on:
- rhel71-power8-test
batchtime: 10080 # 7 days
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
    # SMT8 makes the reported CPU count very high, so reduce the build parallelism to compensate.
    compile_flags: --dbg=on --opt=on --ssl MONGO_DISTMOD=rhel71 -j$(echo "$(grep -c processor /proc/cpuinfo)/2" | bc) CCFLAGS="-mcpu=power8 -mtune=power8 -mcmodel=medium" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
resmoke_jobs_factor: 0.25
test_flags: --storageEngine=inMemory --excludeWithAnyTags=requires_persistence,requires_journaling
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel71-power8-build
- name: .aggregation .common
- name: audit
- name: .auth !.multiversion
- name: .misc_js
- name: .integration !.audit
distros:
- rhel71-power8-build
- name: .jscore .common !.decimal
- name: jsCore_op_query
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: sasl
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion !.csrs
- name: snmp
- name: .ssl
- name: enterprise-rhel-72-s390x-inmem
display_name: Enterprise RHEL 7.2 s390x (inMemory) DEBUG
modules:
- enterprise
run_on:
- rhel72-zseries-test
batchtime: 10080 # 7 days
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
compile_flags: --dbg=on --opt=on --ssl MONGO_DISTMOD=rhel72 -j3 CCFLAGS="-march=z196 -mtune=zEC12" --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
resmoke_jobs_max: 2
test_flags: --storageEngine=inMemory --excludeWithAnyTags=requires_persistence,requires_journaling
tasks:
- name: compile_all_run_unittests_TG
distros:
- rhel72-zseries-build
- name: .aggregation .common
- name: .auth !.multiversion
- name: audit
- name: .misc_js
- name: .integration !.audit
distros:
- rhel72-zseries-build
- name: .jscore .common !.decimal
- name: jsCore_op_query
- name: .read_write_concern
- name: replica_sets
- name: .replica_sets .common
- name: sasl
- name: secondary_reads_passthrough_gen
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.multiversion !.csrs
- name: snmp
- name: .ssl
###########################################
# Experimental buildvariants #
###########################################
- name: ubuntu1804-debug-asan
display_name: ~ ASAN Enterprise Ubuntu 18.04 DEBUG
modules:
- enterprise
run_on:
- ubuntu1804-build
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# We need llvm-symbolizer in the PATH for ASAN for clang-3.7 or later.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: LSAN_OPTIONS="suppressions=etc/lsan.suppressions:report_objects=1" ASAN_OPTIONS=detect_leaks=1:check_initialization_order=true:strict_init_order=true:abort_on_error=1:disable_coredump=0:handle_abort=1
compile_flags: --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --dbg=on --opt=on --allocator=system --sanitize=address --ssl --enable-free-mon=on -j$(grep -c ^processor /proc/cpuinfo) --nostrip
multiversion_platform: ubuntu1804
multiversion_edition: enterprise
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under ASAN build.
is_asan_build: true
scons_cache_scope: shared
test_flags: --excludeWithAnyTags=requires_fast_memory
tasks:
- name: compile_all_run_unittests_TG
- name: compile_benchmarks
- name: .aggregation
- name: .auth
- name: audit
- name: .benchmarks
- name: .causally_consistent !.wo_snapshot
- name: .change_streams
- name: .misc_js
- name: .concurrency !.ubsan !.no_txns !.kill_terminate
- name: disk_wiredtiger
- name: .encrypt
- name: free_monitoring
- name: external_auth
- name: external_auth_aws
- name: initial_sync_fuzzer_gen
- name: .integration !.standalone !.audit
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: jsonSchema
- name: .logical_session_cache
- name: .multi_shard .common
- name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .query_fuzzer
- name: .random_multiversion_replica_sets
- name: .read_write_concern
- name: replica_sets_large_txns_format_gen
- name: replica_sets_large_txns_format_jscore_passthrough
- name: .replica_sets !.multi_oplog
- name: .retry
- name: .read_only
- name: .rollbackfuzzer
- name: sasl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot
- name: .sharding .common !.csrs
- name: snmp
- name: .watchdog
- name: .stitch
- name: unittest_shell_hang_analyzer_gen
- name: .updatefuzzer
- name: server_discovery_and_monitoring_json_test_TG
- name: ubuntu1804-asan
display_name: ~ ASAN Ubuntu 18.04
run_on:
- ubuntu1804-build
stepback: true
expansions:
# We need llvm-symbolizer in the PATH for ASAN for clang-3.7 or later.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: LSAN_OPTIONS="suppressions=etc/lsan.suppressions:report_objects=1" ASAN_OPTIONS=detect_leaks=1:check_initialization_order=true:strict_init_order=true:abort_on_error=1:disable_coredump=0:handle_abort=1
compile_flags: --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --opt=on --allocator=system --sanitize=address --ssl -j$(grep -c ^processor /proc/cpuinfo) --nostrip
multiversion_platform: ubuntu1804
multiversion_edition: enterprise
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under ASAN build.
is_asan_build: true
scons_cache_scope: shared
test_flags: --excludeWithAnyTags=requires_fast_memory
tasks:
- name: compile_all_run_unittests_TG
- name: .aggfuzzer .common
- name: free_monitoring
- name: .jstestfuzz !.initsync
- name: .jstestfuzz_multiversion_gen
- name: ubuntu1804-debug-ubsan
display_name: ~ UBSAN Enterprise Ubuntu 18.04 DEBUG
modules:
- enterprise
run_on:
- ubuntu1804-build
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# We need llvm-symbolizer in the PATH for UBSAN.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: UBSAN_OPTIONS="print_stacktrace=1"
compile_flags: --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --dbg=on --opt=on --sanitize=undefined --ssl --enable-free-mon=on -j$(grep -c ^processor /proc/cpuinfo) --nostrip
multiversion_platform: ubuntu1804
multiversion_edition: enterprise
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under UBSAN build.
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
- name: compile_benchmarks
- name: .aggregation
- name: .auth
- name: audit
- name: .benchmarks
- name: .causally_consistent !.wo_snapshot
- name: .change_streams
- name: .misc_js
- name: .concurrency !.no_txns !.repl !.kill_terminate
- name: disk_wiredtiger
- name: .encrypt
- name: free_monitoring
- name: initial_sync_fuzzer_gen
- name: .integration !.audit
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: jsonSchema
- name: .logical_session_cache .one_sec
- name: .multi_shard .common
- name: multiversion_gen
- name: .multiversion_fuzzer
- name: .multiversion_passthrough
- name: .random_multiversion_replica_sets
- name: .read_write_concern
- name: replica_sets_large_txns_format_gen
- name: replica_sets_large_txns_format_jscore_passthrough
- name: .replica_sets !.multi_oplog
- name: .retry
- name: .rollbackfuzzer
- name: .read_only
- name: sasl
- name: secondary_reads_passthrough_gen
- name: session_jscore_passthrough
- name: .sharding .jscore !.wo_snapshot
- name: .sharding .common !.csrs
- name: snmp
- name: .stitch
- name: .updatefuzzer
- name: watchdog_wiredtiger
- name: server_discovery_and_monitoring_json_test_TG
- name: ubuntu1804-debug-aubsan-lite
display_name: "! {A,UB}SAN Enterprise Ubuntu 18.04 DEBUG"
batchtime: 240 # 4 hours
modules:
- enterprise
run_on:
- ubuntu1804-build
stepback: true
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# We need llvm-symbolizer in the PATH for ASAN for clang-3.7 or later.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: UBSAN_OPTIONS="print_stacktrace=1" LSAN_OPTIONS="suppressions=etc/lsan.suppressions:report_objects=1" ASAN_OPTIONS=detect_leaks=1:check_initialization_order=true:strict_init_order=true:abort_on_error=1:disable_coredump=0:handle_abort=1
compile_flags: --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --dbg=on --opt=on --allocator=system --sanitize=undefined,address --ssl -j$(grep -c ^processor /proc/cpuinfo) --nostrip
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under {A,UB}SAN build.
is_asan_build: true
scons_cache_scope: shared
max_sub_suites: 100
tasks:
- name: compile_all_run_unittests_TG
- name: server_discovery_and_monitoring_json_test_TG
- name: jsCore
- name: jsCore_txns
- name: ubuntu1804-debug-aubsan-lite_fuzzer
display_name: "{A,UB}SAN Enterprise Ubuntu 18.04 FUZZER"
modules:
- enterprise
run_on:
- ubuntu1804-build
stepback: false
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
# We need llvm-symbolizer in the PATH for ASAN for clang-3.7 or later.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: UBSAN_OPTIONS="print_stacktrace=1:halt_on_error=1" LSAN_OPTIONS="suppressions=etc/lsan.suppressions:report_objects=1" ASAN_OPTIONS=detect_leaks=1:check_initialization_order=true:strict_init_order=true:abort_on_error=1:disable_coredump=0:handle_abort=1
compile_flags: LINKFLAGS=-nostdlib++ LIBS=stdc++ --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --dbg=on --opt=on --allocator=system --sanitize=undefined,address,fuzzer --ssl -j$(grep -c ^processor /proc/cpuinfo) --nostrip
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under {A,UB}SAN build.
is_asan_build: true
scons_cache_scope: shared
display_tasks:
- *libfuzzertests
tasks:
- name: libfuzzertests_TG
- name: ubuntu1804-debug-aubsan-async
display_name: "~ {A,UB}SAN Enterprise Ubuntu 18.04 async"
modules:
- enterprise
run_on:
- ubuntu1804-build
stepback: true
expansions:
# We need llvm-symbolizer in the PATH for ASAN for clang-3.7 or later.
variant_path_suffix: /opt/mongodbtoolchain/v3/bin
lang_environment: LANG=C
san_options: UBSAN_OPTIONS="print_stacktrace=1" LSAN_OPTIONS="suppressions=etc/lsan.suppressions:report_objects=1" ASAN_OPTIONS=detect_leaks=1:check_initialization_order=true:strict_init_order=true:abort_on_error=1:disable_coredump=0:handle_abort=1
compile_flags: --variables-files=etc/scons/mongodbtoolchain_v3_clang.vars --dbg=on --opt=on --allocator=system --sanitize=undefined,address --ssl -j$(grep -c ^processor /proc/cpuinfo) --nostrip
multiversion_platform: ubuntu1804
multiversion_edition: enterprise
resmoke_jobs_factor: 0.3 # Avoid starting too many mongod's under {A,UB}SAN build.
is_asan_build: true
test_flags: |- # Use the ServiceExecutorAdaptive with a reasonable number of starting threads
--serviceExecutor=adaptive \
--mongodSetParameters="adaptiveServiceExecutorReservedThreads: 8" \
--mongosSetParameters="adaptiveServiceExecutorReservedThreads: 8"
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
- name: .aggregation !.no_async
- name: .auth
- name: audit
- name: .causally_consistent !.wo_snapshot
- name: .misc_js
- name: .concurrency .common !.kill_terminate
- name: concurrency_replication_causal_consistency
- name: disk_wiredtiger
- name: .encrypt
- name: .integration !.audit
- name: .jscore .common
- name: jsCore_minimum_batch_size
- name: jsCore_op_query
- name: jsCore_txns_large_txns_format
- name: .logical_session_cache .one_sec
- name: multiversion_gen
- name: .read_write_concern
- name: .replica_sets .san
- name: .read_only
- name: sasl
- name: secondary_reads_passthrough_gen
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.csrs
- name: snmp
- name: enterprise-ubuntu-dynamic-1604-64-bit
display_name: "* Shared Library Enterprise Ubuntu 16.04"
batchtime: 240 # 4 hours
modules:
- enterprise
expansions:
additional_targets: archive-mongocryptd archive-mongocryptd-debug
lang_environment: LANG=C
compile_flags: MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --link-model=dynamic
scons_cache_scope: shared
scons_cache_mode: all
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: enterprise-ubuntu-no-latch-1604-64-bit
display_name: "~ Enterprise Ubuntu 16.04 (without Diagnostic Latches)"
batchtime: 1440 # 1 day
modules:
- enterprise
expansions:
compile_flags: MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars --use-diagnostic-latches=off
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
# NOTE(review): variant name says "no-latch" but the display_name and compile_flags
# (--dbg=on --opt=off, no --use-diagnostic-latches=off) describe an unoptimized DEBUG
# build — compare enterprise-ubuntu-no-latch-1604-64-bit above. Confirm the intended
# variant name before relying on it.
- name: enterprise-ubuntu-no-latch-1804-64-bit
  display_name: "~ Enterprise Ubuntu 18.04 DEBUG (Unoptimized)"
batchtime: 1440 # 1 day
modules:
- enterprise
expansions:
compile_flags: MONGO_DISTMOD=ubuntu1804 -j$(grep -c ^processor /proc/cpuinfo) --dbg=on --opt=off --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
scons_cache_scope: shared
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1804-build
- name: enterprise-ubuntu-scanning-replica-set-monitor-1604-64-bit
display_name: "~ Enterprise Ubuntu 16.04 (with ScanningReplicaSetMonitor)"
batchtime: 1440 # 1 day
run_on:
- ubuntu1604-test
modules:
- enterprise
expansions:
scons_cache_scope: shared
compile_flags: MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
    test_flags: |- # Force the scanning replicaSetMonitorProtocol for all tests
      --mongosSetParameters="replicaSetMonitorProtocol: scanning" \
      --mongodSetParameters="replicaSetMonitorProtocol: scanning"
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: .aggregation !.no_async
- name: .sharding .auth
- name: .sharding .causally_consistent !.wo_snapshot
- name: .concurrency .common !.kill_terminate
- name: .integration !.audit
- name: .jscore .common
- name: .logical_session_cache .one_sec
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.csrs
- name: enterprise-ubuntu-task-executor-pool-size-1604-64-bit
display_name: "~ Enterprise Ubuntu 16.04 (with {taskExecutorPoolSize: 4})"
batchtime: 1440 # 1 day
run_on:
- ubuntu1604-test
modules:
- enterprise
expansions:
scons_cache_scope: shared
compile_flags: MONGO_DISTMOD=ubuntu1604 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
test_flags: |- # Set the taskExecutorPoolSize for all tests
--mongosSetParameters="taskExecutorPoolSize: 4"
tasks:
- name: compile_all_run_unittests_TG
distros:
- ubuntu1604-build
- name: .aggregation !.no_async
- name: .sharding .auth
- name: .sharding .causally_consistent !.wo_snapshot
- name: .concurrency .common !.kill_terminate
- name: .integration !.audit
- name: .jscore .common
- name: .logical_session_cache .one_sec
- name: .sharding .jscore !.wo_snapshot !.multi_stmt
- name: .sharding .common !.csrs
- name: shared-scons-cache-pruning
display_name: "Shared SCons Cache Pruning"
run_on:
- ubuntu1604-test
stepback: false
tasks:
- name: shared_scons_cache_pruning
- name: windows-shared-scons-cache-pruning
display_name: "Windows shared SCons Cache Pruning"
run_on:
- windows-64-vs2019-small
stepback: false
tasks:
- name: win_shared_scons_cache_pruning
- name: selected-tests
display_name: "~ Selected Tests"
modules:
- enterprise
run_on:
- rhel62-small
batchtime: 10080 # 7 days
stepback: false
expansions:
selected_tests_buildvariants: enterprise-rhel-62-64-bit enterprise-windows-required linux-64-debug enterprise-ubuntu-dynamic-1604-clang ubuntu1804-debug-aubsan-lite
tasks:
- name: selected_tests_gen
- name: commit-queue
display_name: "~ Commit Queue"
modules:
- enterprise
run_on:
- rhel62-small
batchtime: 10080 # 7 days
stepback: false
expansions:
mh_target: archive-mh archive-mh-debug
compile_flags: --ssl MONGO_DISTMOD=rhel62 -j$(grep -c ^processor /proc/cpuinfo) --variables-files=etc/scons/mongodbtoolchain_v3_gcc.vars
multiversion_platform: rhel62
multiversion_edition: enterprise
has_packages: true
packager_script: packager_enterprise.py
packager_arch: x86_64
packager_distro: rhel62
repo_edition: enterprise
scons_cache_scope: shared
jstestfuzz_num_generated_files: 40
jstestfuzz_concurrent_num_files: 10
target_resmoke_time: 10
large_distro_name: rhel62-large
burn_in_tag_buildvariants: enterprise-rhel-62-64-bit-majority-read-concern-off enterprise-rhel-62-64-bit-inmem linux-64-duroff
tasks:
- name: commit_queue_placeholder
- name: compile_core_tools_TG
distros:
- rhel62-large
- name: validate_commit_message