mirror of https://github.com/mongodb/mongo.git (synced 2024-11-24 08:30:56 +01:00)

SERVER-23312 Python linting - Lint using pylint, pydocstyle & mypy
parent a5dacf7092, commit c50c68fef1
.pydocstyle (new file, 12 lines)

@@ -0,0 +1,12 @@
+# See https://readthedocs.org/projects/pydocstyle/
+[pydocstyle]
+inherit = false
+# D105 - Missing docstring in magic method
+# D202 - No blank lines allowed after function docstring
+# D203 - 1 blank line required before class docstring
+# D212 - Multi-line docstring summary should start at the first line
+# D213 - Multi-line docstring summary should start at the second line
+# D301 - Use r""" if any backslashes in a docstring
+ignore = D105,D202,D203,D212,D213,D301
+# Do not run on buildscripts/tests/
+match = ^((?!buildscripts\/tests\/).)*$
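
A quick illustration (not part of the commit) of a docstring this configuration accepts: with both D212 and D213 ignored, the summary may sit on the first line, and D202/D203 stop policing the blank lines around it. The function is borrowed from the aggregate_tracefiles.py hunk further below:

    import os

    def getfilesize(path):
        """Return file size of 'path'.

        Summary on the first line, a blank line, then detail: all of
        D105/D202/D203/D212/D213/D301 stay quiet on this shape.
        """
        if not os.path.isfile(path):
            return 0
        return os.path.getsize(path)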

.pylintrc (new file, 25 lines)

@@ -0,0 +1,25 @@
+# See https://www.pylint.org/
+[BASIC]
+# Permit 2 character & long argument names, like db
+argument-rgx=[a-z_][a-z0-9_]{1,50}$
+# Long attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,50}$
+# Long function names
+function-rgx=[a-z_][a-z0-9_]{2,50}$
+# Long method names
+method-rgx=[a-z_][a-z0-9_]{2,50}$
+# Permit 2 character & long variable names, like sb
+variable-rgx=[a-z_][a-z0-9_]{1,50}$
+
+[MESSAGES CONTROL]
+# C0301 - line-too-long - some of the type annotations are longer than 100 columns
+# C0330 - bad-continuation - ignore conflicts produced by yapf formatting
+# E0401 - import-error - ignore imports that fail to load
+# E1101 - no-member - ignore maybe-no-member warnings
+# I0011 - locally-disabled - ignore warnings about disabled pylint checks
+# R0204 - redefined-variable-type
+# R0903 - too-few-public-methods - pylint does not always know best
+# W0511 - fixme - ignore TODOs in comments
+# W0611 - unused-import - the typing module is needed for mypy
+
+disable=bad-continuation,fixme,import-error,line-too-long,no-member,locally-disabled,redefined-variable-type,too-few-public-methods,unused-import
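
W0611 is disabled because, on Python 2, `typing` is imported solely so mypy can check comment-style annotations. A minimal sketch of that pattern (mine, not from the commit):

    from typing import Dict, List  # used only by the type comments below

    def tally(votes):
        # type: (List[str]) -> Dict[str, int]
        """Count occurrences of each vote."""
        counts = {}  # type: Dict[str, int]
        for vote in votes:
            counts[vote] = counts.get(vote, 0) + 1
        return counts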

SConstruct (10 lines changed)

@@ -509,7 +509,7 @@ try:
         print("version.json does not contain a version string")
         Exit(1)
     if 'githash' not in version_data:
-        version_data['githash'] = utils.getGitVersion()
+        version_data['githash'] = utils.get_git_version()

 except IOError as e:
     # If the file error wasn't because the file is missing, error out
@@ -518,8 +518,8 @@ except IOError as e:
         Exit(1)

     version_data = {
-        'version': utils.getGitDescribe()[1:],
-        'githash': utils.getGitVersion(),
+        'version': utils.get_git_describe()[1:],
+        'githash': utils.get_git_version(),
     }

 except ValueError as e:
@@ -3210,8 +3210,8 @@ if incremental_link.exists(env):

 def checkErrorCodes():
     import buildscripts.errorcodes as x
-    if x.checkErrorCodes() == False:
-        env.FatalError("next id to use: {0}", x.getNextCode())
+    if x.check_error_codes() == False:
+        env.FatalError("next id to use: {0}", x.get_next_code())

 checkErrorCodes()
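
The renamed errorcodes helpers keep the same contract — a boolean check plus the next free assertion code — so the SConstruct call site above boils down to:

    import buildscripts.errorcodes as x

    if not x.check_error_codes():                        # formerly checkErrorCodes()
        print("next id to use: %s" % x.get_next_code())  # formerly getNextCode()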

(deleted file)
@@ -1,7 +0,0 @@
-# See https://readthedocs.org/projects/pydocstyle/
-[pydocstyle]
-inherit = false
-# D202 - No blank lines allowed after function docstring
-# D203 - 1 blank line required before class docstring
-# D212 - Multi-line docstring summary should start at the first line
-ignore = D202,D203,D212

(deleted file)
@@ -1,11 +0,0 @@
-# See https://www.pylint.org/
-[MESSAGES CONTROL]
-# C0301 - line-too-long - some of the type annotations are longer then 100 columns
-# C0330 - bad-continuation - ignore conflicts produced by yapf formatting
-# E0401 - import-error - ignore imports that fail to load
-# I0011 - locally-disabled - ignore warnings about disable pylint checks
-# R0903 - too-few-public-method - pylint does not always know best
-# W0511 - fixme - ignore TODOs in comments
-# W0611 - unused-import - typing module is needed for mypy
-
-disable=bad-continuation,fixme,import-error,line-too-long,locally-disabled,too-few-public-methods,unused-import

@@ -1 +1 @@
-
+"""Empty."""

@@ -1,15 +1,18 @@
+"""Aggregate_tracefiles module.
+
+This script aggregates several tracefiles into one tracefile.
+All but the last argument are input tracefiles or .txt files which list tracefiles.
+The last argument is the tracefile to which the output will be written.
+"""
+
 import subprocess
 import os
 import sys
 from optparse import OptionParser
-""" This script aggregates several tracefiles into one tracefile
-All but the last argument are input tracefiles or .txt files which list tracefiles.
-The last argument is the tracefile to which the output will be written
-"""


 def aggregate(inputs, output):
-    """Aggregates the tracefiles given in inputs to a tracefile given by output"""
+    """Aggregate the tracefiles given in inputs to a tracefile given by output."""
     args = ['lcov']

     for name in inputs:
@@ -23,23 +26,25 @@ def aggregate(inputs, output):


 def getfilesize(path):
+    """Return file size of 'path'."""
     if not os.path.isfile(path):
         return 0
     return os.path.getsize(path)


 def main():
+    """Execute Main entry."""
     inputs = []

     usage = "usage: %prog input1.info input2.info ... output.info"
     parser = OptionParser(usage=usage)

-    (options, args) = parser.parse_args()
+    (_, args) = parser.parse_args()
     if len(args) < 2:
         return "must supply input files"

     for path in args[:-1]:
-        name, ext = os.path.splitext(path)
+        _, ext = os.path.splitext(path)

         if ext == '.info':
             if getfilesize(path) > 0:
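
Per the module docstring, every argument but the last is an input; an invocation looks like this (file names hypothetical, script path assumed from the buildscripts layout):

    python buildscripts/aggregate_tracefiles.py unit.info integration.info coverage.info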

@@ -25,6 +25,7 @@ class AwsEc2(object):
     ])

     def __init__(self):
+        """Initialize AwsEc2."""
         try:
             self.connection = boto3.resource("ec2")
         except botocore.exceptions.BotoCoreError:
@@ -36,7 +37,9 @@ class AwsEc2(object):
     @staticmethod
     def wait_for_state(instance, state, wait_time_secs=0, show_progress=False):
         """Wait up to 'wait_time_secs' for instance to be in 'state'.
-        Return 0 if 'state' reached, 1 otherwise."""
+
+        Return 0 if 'state' reached, 1 otherwise.
+        """
         if show_progress:
             print("Waiting for instance {} to reach '{}' state".format(instance, state), end="",
                   file=sys.stdout)
@@ -72,7 +75,7 @@ class AwsEc2(object):
         return 0 if reached_state else 1

     def control_instance(self, mode, image_id, wait_time_secs=0, show_progress=False):
-        """Controls an AMI instance. Returns 0 & status information, if successful."""
+        """Control an AMI instance. Returns 0 & status information, if successful."""
         if mode not in _MODES:
             raise ValueError("Invalid mode '{}' specified, choose from {}.".format(mode, _MODES))

@@ -119,7 +122,7 @@ class AwsEc2(object):
         return ret, status

     def tag_instance(self, image_id, tags):
-        """Tags an AMI instance. """
+        """Tag an AMI instance."""
         if tags:
             # It's possible that ClientError code InvalidInstanceID.NotFound could be returned,
             # even if the 'image_id' exists. We will retry up to 5 times, with increasing wait,
@@ -135,12 +138,14 @@ class AwsEc2(object):
                     time.sleep(i + 1)
             instance.create_tags(Tags=tags)

-    def launch_instance(self, ami, instance_type, block_devices=None, key_name=None,
-                        security_group_ids=None, security_groups=None, subnet_id=None, tags=None,
-                        wait_time_secs=0, show_progress=False, **kwargs):
-        """Launches and tags an AMI instance.
+    def launch_instance(  # pylint: disable=too-many-arguments,too-many-locals
+            self, ami, instance_type, block_devices=None, key_name=None, security_group_ids=None,
+            security_groups=None, subnet_id=None, tags=None, wait_time_secs=0, show_progress=False,
+            **kwargs):
+        """Launch and tag an AMI instance.

-        Returns the tuple (0, status_information), if successful."""
+        Return the tuple (0, status_information), if successful.
+        """

         bdms = []
         if block_devices is None:
@@ -177,8 +182,8 @@ class AwsEc2(object):
         return self.control_instance("status", instance.instance_id)


-def main():
-    """Main program."""
+def main():  # pylint: disable=too-many-locals,too-many-statements
+    """Execute Main program."""

     required_create_options = ["ami", "key_name"]

@@ -242,9 +247,7 @@ def main():
                               " bracketed YAML - i.e. JSON with support for single quoted"
                               " and unquoted keys. Example, '{DryRun: True}'"))

-    status_options.add_option("--yamlFile",
-                              dest="yaml_file",
-                              default=None,
+    status_options.add_option("--yamlFile", dest="yaml_file", default=None,
                               help="Save the status into the specified YAML file.")

     parser.add_option_group(control_options)
@@ -309,5 +312,6 @@ def main():

     print(yaml.safe_dump(status_dict))

+
 if __name__ == "__main__":
     main()
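
A call-site sketch for the reflowed launch_instance signature (all values hypothetical; the tags use the Key/Value dict form that tag_instance passes through to boto3's create_tags):

    aws = AwsEc2()
    ret, status = aws.launch_instance(
        ami="ami-0abc1234", instance_type="t2.micro", key_name="dev-key",
        tags=[{"Key": "Name", "Value": "ci-host"}], wait_time_secs=300, show_progress=True)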

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-"""
-Command line utility for determining what jstests have been added or modified
-"""
+"""Command line utility for determining what jstests have been added or modified."""

 from __future__ import absolute_import

@@ -12,22 +10,24 @@ import optparse
 import os.path
 import subprocess
 import re
-import requests
 import shlex
 import sys
 import urlparse
+
+import requests
 import yaml

 # Get relative imports to work when the package is not installed on the PYTHONPATH.
 if __name__ == "__main__" and __package__ is None:
     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-    from buildscripts import resmokelib
-    from buildscripts.ciconfig import evergreen
+    from buildscripts import resmokelib  # pylint: disable=wrong-import-position
+    from buildscripts.ciconfig import evergreen  # pylint: disable=wrong-import-position

 API_SERVER_DEFAULT = "https://evergreen.mongodb.com"


 def parse_command_line():
+    """Parse command line options."""
     parser = optparse.OptionParser(usage="Usage: %prog [options] [resmoke command]")

     parser.add_option("--maxRevisions", dest="max_revisions",
@@ -75,12 +75,12 @@ def parse_command_line():


 def callo(args):
-    """Call a program, and capture its output
-    """
+    """Call a program, and capture its output."""
     return subprocess.check_output(args)


 def read_evg_config():
+    """Read evg config file."""
     # Expand out evergreen config file possibilities
     file_list = [
         "./.evergreen.yml",
@@ -96,7 +96,7 @@ def read_evg_config():


 def find_last_activated_task(revisions, variant, branch_name):
-    """ Get the git hash of the most recently activated build before this one """
+    """Get the git hash of the most recently activated build before this one."""
     rest_prefix = "/rest/v1/"
     project = "mongodb-mongo-" + branch_name
     build_prefix = "mongodb_mongo_" + branch_name + "_" + variant.replace('-', '_')
@@ -121,15 +121,16 @@ def find_last_activated_task(revisions, variant, branch_name):
             build_data = build_resp.json()
             if build_data["activated"]:
                 return build_data["revision"]
-        except:
+        except:  # pylint: disable=bare-except
             # Sometimes build data is incomplete, as was the related build.
-            next
+            pass

     return None


 def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, check_evergreen):
-    """
+    """Find the changed tests.
+
     Use git to find which files have changed in this patch.
     TODO: This should be expanded to search for enterprise modules.
     The returned file paths are in normalized form (see os.path.normpath(path)).
@@ -172,7 +173,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
     # The lines with untracked files start with '?? '.
     for line in untracked_files:
         if line.startswith("?"):
-            (status, line) = line.split(" ", 1)
+            (_, line) = line.split(" ", 1)
             changed_files.append(line)

     for line in changed_files:
@@ -187,9 +188,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch


 def find_exclude_tests(selector_file):
-    """
-    Parses etc/burn_in_tests.yml. Returns lists of excluded suites, tasks & tests.
-    """
+    """Parse etc/burn_in_tests.yml. Returns lists of excluded suites, tasks & tests."""

     if not selector_file:
         return ([], [], [])
@@ -209,8 +208,8 @@ def find_exclude_tests(selector_file):


 def filter_tests(tests, exclude_tests):
-    """
-    Excludes tests which have been blacklisted.
+    """Exclude tests which have been blacklisted.
+
     A test is in the tests list, i.e., ['jstests/core/a.js']
     The tests paths must be in normalized form (see os.path.normpath(path)).
     """
@@ -227,7 +226,8 @@ def filter_tests(tests, exclude_tests):


 def find_tests_by_executor(suites):
-    """
+    """Find tests by executor.
+
     Looks up what other resmoke suites run the tests specified in the suites
     parameter. Returns a dict keyed by test name, value is array of suite names.
     """
@@ -241,7 +241,8 @@ def find_tests_by_executor(suites):


 def create_executor_list(suites, exclude_suites):
-    """
+    """Create the executor list.
+
     Looks up what other resmoke suites run the tests specified in the suites
     parameter. Returns a dict keyed by suite name / executor, value is tests
     to run under that executor.
@@ -257,8 +258,7 @@ def create_executor_list(suites, exclude_suites):


 def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
-    """
-    Finds associated tasks for the specified buildvariant and suites.
+    """Find associated tasks for the specified buildvariant and suites.

     Returns a dict keyed by task_name, with executor, resmoke_args & tests, i.e.,
     {'jsCore_small_oplog':
@@ -288,25 +288,25 @@ def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
     for suite in suites.keys():
         for task_name, task_arg in variant_task_args.items():
             # Find the resmoke_args for matching suite names.
-            if re.compile('--suites=' + suite + '(?:\s+|$)').match(task_arg):
+            if re.compile('--suites=' + suite + r'(?:\s+|$)').match(task_arg):
                 tasks_to_run[task_name] = {"resmoke_args": task_arg, "tests": suites[suite]}

     return tasks_to_run


 def _write_report_file(tests_by_executor, pathname):
-    """
-    Writes out a JSON file containing the tests_by_executor dict. This should
-    be done during the compile task when the git repo is available.
+    """Write out a JSON file containing the tests_by_executor dict.
+
+    This should be done during the compile task when the git repo is available.
     """
     with open(pathname, "w") as fstream:
         json.dump(tests_by_executor, fstream)


 def _load_tests_file(pathname):
-    """
-    Load the list of tests and executors from the specified file. The file might
-    not exist, and this is fine. The task running this becomes a nop.
+    """Load the list of tests and executors from the specified file.
+
+    The file might not exist, and this is fine. The task running this becomes a nop.
     """
     if not os.path.isfile(pathname):
         return None
@@ -315,12 +315,12 @@ def _load_tests_file(pathname):


 def _save_report_data(saved_data, pathname, task):
-    """
-    Read in the report file from the previous resmoke.py run if it exists. We'll concat it to the
-    passed saved_data dict.
+    """Read in the report file from the previous resmoke.py run if it exists.
+
+    We'll concat it to the passed saved_data dict.
     """
     if not os.path.isfile(pathname):
-        return None
+        return

     with open(pathname, "r") as fstream:
         current_data = json.load(fstream)
@@ -332,6 +332,7 @@ def _save_report_data(saved_data, pathname, task):


 def main():
+    """Execute Main program."""
     values, args = parse_command_line()

     # If a resmoke.py command wasn't passed in, use a simple version.
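
The r'(?:\s+|$)' change matters because '\s' in a plain string literal is an invalid escape that Python merely tolerates; the raw string makes the regex intent explicit. A quick check (suite name hypothetical):

    import re

    pattern = re.compile('--suites=' + 'core' + r'(?:\s+|$)')
    print(bool(pattern.match('--suites=core --storageEngine=wiredTiger')))  # True
    print(bool(pattern.match('--suites=core_txns')))                        # False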

@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+"""Bypass compile and fetch binaries."""

 from __future__ import absolute_import
 from __future__ import print_function
@@ -7,15 +8,16 @@ import argparse
 import json
 import os
 import re
 import shutil
 import sys
 import tarfile
-
 import urllib
+# pylint: disable=ungrouped-imports
 try:
     from urlparse import urlparse
 except ImportError:
-    from urllib.parse import urlparse
+    from urllib.parse import urlparse  # type: ignore
+# pylint: enable=ungrouped-imports

 import requests
 import yaml
@@ -24,6 +26,7 @@ _IS_WINDOWS = (sys.platform == "win32" or sys.platform == "cygwin")


 def executable_name(pathname):
+    """Return the executable name."""
     # Ensure that executable files on Windows have a ".exe" extension.
     if _IS_WINDOWS and os.path.splitext(pathname)[1] != ".exe":
         return "{}.exe".format(pathname)
@@ -31,6 +34,7 @@ def executable_name(pathname):


 def archive_name(archive):
+    """Return the archive name."""
     # Ensure the right archive extension is used for Windows.
     if _IS_WINDOWS:
         return "{}.zip".format(archive)
@@ -38,6 +42,7 @@ def archive_name(archive):


 def requests_get_json(url):
+    """Return the JSON response."""
     response = requests.get(url)
     response.raise_for_status()

@@ -49,9 +54,9 @@ def requests_get_json(url):


 def read_evg_config():
-    """
-    Attempts to parse the Evergreen configuration from its home location.
-    Returns None if the configuration file wasn't found.
+    """Attempt to parse the Evergreen configuration from its home location.
+
+    Return None if the configuration file wasn't found.
     """
     evg_file = os.path.expanduser("~/.evergreen.yml")
     if os.path.isfile(evg_file):
@@ -62,18 +67,14 @@ def read_evg_config():


 def write_out_bypass_compile_expansions(patch_file, **expansions):
-    """
-    Write out the macro expansions to given file.
-    """
+    """Write out the macro expansions to given file."""
     with open(patch_file, "w") as out_file:
         print("Saving compile bypass expansions to {0}: ({1})".format(patch_file, expansions))
         yaml.safe_dump(expansions, out_file, default_flow_style=False)


 def write_out_artifacts(json_file, artifacts):
-    """
-    Write out the JSON file with URLs of artifacts to given file.
-    """
+    """Write out the JSON file with URLs of artifacts to given file."""
     with open(json_file, "w") as out_file:
         print("Generating artifacts.json from pre-existing artifacts {0}".format(
             json.dumps(artifacts, indent=4)))
@@ -81,6 +82,7 @@ def write_out_artifacts(json_file, artifacts):


 def generate_bypass_expansions(project, build_variant, revision, build_id):
+    """Perform the generate bypass expansions."""
     expansions = {}
     # With compile bypass we need to update the URL to point to the correct name of the base commit
     # binaries.
@@ -103,8 +105,7 @@ def generate_bypass_expansions(project, build_variant, revision, build_id):


 def should_bypass_compile():
-    """
-    Based on the modified patch files determine whether the compile stage should be bypassed.
+    """Determine whether the compile stage should be bypassed based on the modified patch files.

     We use lists of files and directories to more precisely control which modified patch files will
     lead to compile bypass.
@@ -133,7 +134,7 @@ def should_bypass_compile():
         "buildscripts/make_archive.py",
         "buildscripts/moduleconfig.py",
         "buildscripts/msitrim.py",
-        "buildscripts/packager-enterprise.py",
+        "buildscripts/packager_enterprise.py",
         "buildscripts/packager.py",
         "buildscripts/scons.py",
         "buildscripts/utils.py",
@@ -171,6 +172,7 @@ def should_bypass_compile():


 def parse_args():
+    """Parse the program arguments."""
     parser = argparse.ArgumentParser()
     parser.add_argument("--project", required=True,
                         help="The Evergreen project. e.g mongodb-mongo-master")
@@ -192,8 +194,9 @@ def parse_args():
     return parser.parse_args()


-def main():
-    """
+def main():  # pylint: disable=too-many-locals,too-many-statements
+    """Execute Main entry.
+
     From the /rest/v1/projects/{project}/revisions/{revision} endpoint find an existing build id
     to generate the compile task id to use for retrieving artifacts when bypassing compile.

@@ -225,6 +228,7 @@ def main():
     # Evergreen only contain "_". Replace the hyphens before searching for the build.
     prefix = prefix.replace("-", "_")
     build_id_pattern = re.compile(prefix)
+    build_id = None
     for build_id in revisions["builds"]:
         # Find a suitable build_id
         match = build_id_pattern.search(build_id)
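
A sketch of the REST call described in main()'s docstring (project and revision hypothetical):

    url = ("https://evergreen.mongodb.com/rest/v1/projects/"
           "mongodb-mongo-master/revisions/deadbeef1234")
    revisions = requests_get_json(url)  # a dict whose "builds" list main() scans for a build_id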

@@ -1,4 +1,5 @@
 """API to parse and access the configuration present in a evergreen.yml file.
+
 The API also provides methods to access specific fields present in the mongodb/mongo
 configuration file.
 """
@@ -31,7 +32,7 @@ class EvergreenProjectConfig(object):

     @property
     def task_names(self):
-        """The list of task names."""
+        """Get the list of task names."""
         return self._tasks_by_name.keys()

     def get_task(self, task_name):
@@ -40,7 +41,7 @@ class EvergreenProjectConfig(object):

     @property
     def lifecycle_task_names(self):
-        """The list of names of the tasks that have not been excluded from test lifecycle."""
+        """Get the list of names of the tasks that have not been excluded from test lifecycle."""
         excluded = self._get_test_lifecycle_excluded_task_names()
         return [name for name in self.task_names if name not in excluded]

@@ -53,7 +54,7 @@ class EvergreenProjectConfig(object):

     @property
     def variant_names(self):
-        """The list of build variant names."""
+        """Get the list of build variant names."""
         return self._variants_by_name.keys()

     def get_variant(self, variant_name):
@@ -70,19 +71,17 @@ class Task(object):

     @property
     def name(self):
-        """The task name."""
+        """Get the task name."""
         return self.raw["name"]

     @property
     def depends_on(self):
-        """The list of task names this task depends on."""
+        """Get the list of task names this task depends on."""
         return self.raw.get("depends_on", [])

     @property
     def resmoke_args(self):
-        """The value of the resmoke_args argument of the 'run tests' function if it is
-        defined, or None.
-        """
+        """Get the resmoke_args from 'run tests' function if defined, or None."""
         for command in self.raw.get("commands", []):
             if command.get("func") == "run tests":
                 return command.get("vars", {}).get("resmoke_args")
@@ -90,22 +89,21 @@ class Task(object):

     @property
     def resmoke_suite(self):
-        """The value of the --suites option in the resmoke_args argument of the 'run tests'
-        function if it is defined, or None. """
+        """Get the --suites option in the resmoke_args of 'run tests' if defined, or None."""
         args = self.resmoke_args
         if args:
             return ResmokeArgs.get_arg(args, "suites")
         return None

     def __str__(self):
         return self.name


 class Variant(object):
-    """Represent a build variant configuration as found in an Evergreen project
-    configuration file.
-    """
+    """Build variant configuration as found in an Evergreen project configuration file."""

     def __init__(self, conf_dict, task_map):
+        """Initialize Variant."""
         self.raw = conf_dict
         run_on = self.run_on
         self.tasks = [
@@ -118,40 +116,44 @@ class Variant(object):

     @property
     def name(self):
-        """The build variant name."""
+        """Get the build variant name."""
         return self.raw["name"]

     @property
     def display_name(self):
-        """The build variant display name, or None if not found."""
+        """Get the build variant display name, or None if not found."""
         return self.raw.get("display_name")

     @property
     def batchtime(self):
-        """The build variant batchtime parameter as a datetime.timedelta, or None if not found."""
+        """Get the build variant batchtime parameter as datetime.timedelta.
+
+        Return None if the batchtime parameter is not found.
+        """
         batchtime = self.raw.get("batchtime")
         return datetime.timedelta(minutes=batchtime) if batchtime is not None else None

     @property
     def modules(self):
-        """The build variant modules parameter as a list of module names."""
+        """Get build variant modules parameter as a list of module names."""
         modules = self.raw.get("modules")
         return modules if modules is not None else []

     @property
     def run_on(self):
-        """The build variant run_on parameter as a list of distro names."""
+        """Get build variant run_on parameter as a list of distro names."""
         run_on = self.raw.get("run_on")
         return run_on if run_on is not None else []

     @property
     def task_names(self):
-        """The list of task names."""
+        """Get list of task names."""
         return [t.name for t in self.tasks]

     def get_task(self, task_name):
-        """Return the task with the given name as an instance of VariantTask or None if this
-        variant does not run the task.
+        """Return the task with the given name as an instance of VariantTask.
+
+        Return None if this variant does not run the task.
         """
         for task in self.tasks:
             if task.name == task_name:
@@ -169,12 +171,12 @@ class Variant(object):

     @property
     def test_flags(self):
-        """Return the value of the test_flags expansion or None if not found."""
+        """Get the value of the test_flags expansion or None if not found."""
         return self.expansion("test_flags")

     @property
     def num_jobs_available(self):
-        """Return the value of the num_jobs_available expansion or None if not found."""
+        """Get the value of the num_jobs_available expansion or None if not found."""
         return self.expansion("num_jobs_available")


@@ -182,16 +184,17 @@ class VariantTask(Task):
     """Represent a task definition in the context of a build variant."""

     def __init__(self, task, run_on, variant):
+        """Initialize VariantTask."""
         Task.__init__(self, task.raw)
         self.run_on = run_on
         self.variant = variant

     @property
     def combined_resmoke_args(self):
-        """Return the combined resmoke arguments resulting from the concatenation of the task's
-        resmoke_args parameter and the variant's test_flags parameter.
+        """Get the combined resmoke arguments.

         If the task does not have a 'resmoke_args' parameter, then None is returned.
+        This results from the concatenation of the task's resmoke_args parameter and the
+        variant's test_flags parameter.
         """
         resmoke_args = self.resmoke_args
         test_flags = self.variant.test_flags
@@ -199,16 +202,16 @@ class VariantTask(Task):
             return None
         elif test_flags is None:
             return self.resmoke_args
-        else:
-            return "{} {}".format(resmoke_args, test_flags)
+        return "{} {}".format(resmoke_args, test_flags)


 class ResmokeArgs(object):
+    """ResmokeArgs class."""

     @staticmethod
     def get_arg(resmoke_args, name):
-        """Return the value of the option --'name' in the 'resmoke_args' string or
-        None if not found.
-        """
+        """Return the value from --'name' in the 'resmoke_args' string or None if not found."""
        match = re.search(r"--{}[=\s](?P<value>\w+)".format(name), resmoke_args)
        if match:
            return match.group("value")
        return None
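
ResmokeArgs.get_arg is self-contained enough to demo directly; the same regex, lifted out of the class:

    import re

    def get_arg(resmoke_args, name):
        match = re.search(r"--{}[=\s](?P<value>\w+)".format(name), resmoke_args)
        return match.group("value") if match else None

    print(get_arg("--suites=core --storageEngine=wiredTiger", "suites"))  # core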

@@ -119,21 +119,23 @@ class TagsConfig(object):


 def getdefault(doc, key, default):
-    """Return the value in 'doc' with key 'key' if it is present and not None, returns
-    the specified default value otherwise."""
+    """Return the value in 'doc' with key 'key' if present and not None.
+
+    Return the specified default value otherwise.
+    """
     value = doc.get(key)
     if value is not None:
         return value
-    else:
-        return default
+    return default


 def setdefault(doc, key, default):
-    """Return the value in 'doc' with key 'key' if it is present and not None, sets the value
-    to default and return it otherwise."""
+    """Return the value in 'doc' with key 'key' if present and not None.
+
+    Otherwise set the value to default and return it.
+    """
     value = doc.setdefault(key, default)
     if value is not None:
         return value
-    else:
-        doc[key] = default
-        return default
+    doc[key] = default
+    return default
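
The setdefault rewrite is behavior-preserving: dict.setdefault returns an existing None rather than the default, so the helper overwrites it. A standalone check of the fixed shape:

    def setdefault(doc, key, default):
        """Treat an existing None value like a missing key."""
        value = doc.setdefault(key, default)
        if value is not None:
            return value
        doc[key] = default
        return default

    print(setdefault({"tags": None}, "tags", []))  # [] -- bare dict.setdefault would return None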

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-"""
-A script that provides:
+"""Clang format script that provides the following.
+
 1. Ability to grab binaries where possible from LLVM.
 2. Ability to download binaries from MongoDB cache for clang-format.
 3. Validates clang-format is the right version.
@@ -21,7 +21,7 @@ import tarfile
 import tempfile
 import threading
 import urllib2
-from distutils import spawn
+from distutils import spawn  # pylint: disable=no-name-in-module
 from optparse import OptionParser
 from multiprocessing import cpu_count

@@ -29,8 +29,8 @@ from multiprocessing import cpu_count
 if __name__ == "__main__" and __package__ is None:
     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))

-from buildscripts.linter import git
-from buildscripts.linter import parallel
+from buildscripts.linter import git  # pylint: disable=wrong-import-position
+from buildscripts.linter import parallel  # pylint: disable=wrong-import-position

 ##############################################################################
 #
@@ -58,18 +58,17 @@ CLANG_FORMAT_SOURCE_TAR_BASE = string.Template(

 ##############################################################################
 def callo(args):
-    """Call a program, and capture its output
-    """
+    """Call a program, and capture its output."""
     return subprocess.check_output(args)


 def get_tar_path(version, tar_path):
-    """ Get the path to clang-format in the llvm tarball
-    """
+    """Return the path to clang-format in the llvm tarball."""
     return CLANG_FORMAT_SOURCE_TAR_BASE.substitute(version=version, tar_path=tar_path)


 def extract_clang_format(tar_path):
+    """Extract the clang_format tar file."""
     # Extract just the clang-format binary
     # On OSX, we shell out to tar because tarfile doesn't support xz compression
     if sys.platform == 'darwin':
@@ -85,9 +84,7 @@ def extract_clang_format(tar_path):


 def get_clang_format_from_cache_and_extract(url, tarball_ext):
-    """Get clang-format from mongodb's cache
-    and extract the tarball
-    """
+    """Get clang-format from mongodb's cache and extract the tarball."""
     dest_dir = tempfile.gettempdir()
     temp_tar_file = os.path.join(dest_dir, "temp.tar" + tarball_ext)

@@ -100,8 +97,8 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
     for attempt in range(num_tries):
         try:
             resp = urllib2.urlopen(url)
-            with open(temp_tar_file, 'wb') as f:
-                f.write(resp.read())
+            with open(temp_tar_file, 'wb') as fh:
+                fh.write(resp.read())
             break
         except urllib2.URLError:
             if attempt == num_tries - 1:
@@ -112,9 +109,7 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):


 def get_clang_format_from_darwin_cache(dest_file):
-    """Download clang-format from llvm.org, unpack the tarball,
-    and put clang-format in the specified place
-    """
+    """Download clang-format from llvm.org, unpack the tarball to dest_file."""
     get_clang_format_from_cache_and_extract(CLANG_FORMAT_HTTP_DARWIN_CACHE, ".xz")

     # Destination Path
@@ -122,8 +117,7 @@ def get_clang_format_from_linux_cache(dest_file):


 def get_clang_format_from_linux_cache(dest_file):
-    """Get clang-format from mongodb's cache
-    """
+    """Get clang-format from mongodb's cache."""
     get_clang_format_from_cache_and_extract(CLANG_FORMAT_HTTP_LINUX_CACHE, ".gz")

     # Destination Path
@@ -131,11 +125,10 @@ def get_clang_format_from_linux_cache(dest_file):


 class ClangFormat(object):
-    """Class encapsulates finding a suitable copy of clang-format,
-    and linting/formating an individual file
-    """
+    """ClangFormat class."""

-    def __init__(self, path, cache_dir):
+    def __init__(self, path, cache_dir):  # pylint: disable=too-many-branches
+        """Initialize ClangFormat."""
         self.path = None
         clang_format_progname_ext = ""

@@ -167,7 +160,7 @@ class ClangFormat(object):
             ]

             if sys.platform == "win32":
-                for i in range(len(programs)):
+                for i, _ in enumerate(programs):
                     programs[i] += '.exe'

             for program in programs:
@@ -222,8 +215,7 @@ class ClangFormat(object):
         self.print_lock = threading.Lock()

     def _validate_version(self):
-        """Validate clang-format is the expected version
-        """
+        """Validate clang-format is the expected version."""
         cf_version = callo([self.path, "--version"])

         if CLANG_FORMAT_VERSION in cf_version:
@@ -235,8 +227,7 @@ class ClangFormat(object):
         return False

     def _lint(self, file_name, print_diff):
-        """Check the specified file has the correct format
-        """
+        """Check the specified file has the correct format."""
         with open(file_name, 'rb') as original_text:
             original_file = original_text.read()

@@ -262,13 +253,11 @@ class ClangFormat(object):
         return True

     def lint(self, file_name):
-        """Check the specified file has the correct format
-        """
+        """Check the specified file has the correct format."""
         return self._lint(file_name, print_diff=True)

     def format(self, file_name):
-        """Update the format of the specified file
-        """
+        """Update the format of the specified file."""
         if self._lint(file_name, print_diff=False):
             return True

@@ -285,32 +274,28 @@ class ClangFormat(object):
         return formatted


-files_re = re.compile('\\.(h|hpp|ipp|cpp|js)$')
+FILES_RE = re.compile('\\.(h|hpp|ipp|cpp|js)$')


 def is_interesting_file(file_name):
-    """"Return true if this file should be checked
-    """
+    """Return true if this file should be checked."""
     return ((file_name.startswith("jstests") or file_name.startswith("src"))
             and not file_name.startswith("src/third_party/")
-            and not file_name.startswith("src/mongo/gotools/")) and files_re.search(file_name)
+            and not file_name.startswith("src/mongo/gotools/")) and FILES_RE.search(file_name)


 def get_list_from_lines(lines):
-    """"Convert a string containing a series of lines into a list of strings
-    """
+    """Convert a string containing a series of lines into a list of strings."""
     return [line.rstrip() for line in lines.splitlines()]


 def _get_build_dir():
-    """Get the location of the scons' build directory in case we need to download clang-format
-    """
+    """Return the location of the scons' build directory."""
     return os.path.join(git.get_base_dir(), "build")


 def _lint_files(clang_format, files):
-    """Lint a list of files with clang-format
-    """
+    """Lint a list of files with clang-format."""
     clang_format = ClangFormat(clang_format, _get_build_dir())

     lint_clean = parallel.parallel_process([os.path.abspath(f) for f in files], clang_format.lint)
@@ -321,8 +306,7 @@ def _lint_files(clang_format, files):


 def lint_patch(clang_format, infile):
-    """Lint patch command entry point
-    """
+    """Lint patch command entry point."""
     files = git.get_files_to_check_from_patch(infile, is_interesting_file)

     # Patch may have files that we do not want to check which is fine
@@ -331,8 +315,7 @@ def lint_patch(clang_format, infile):


 def lint(clang_format):
-    """Lint files command entry point
-    """
+    """Lint files command entry point."""
     files = git.get_files_to_check([], is_interesting_file)

     _lint_files(clang_format, files)
@@ -341,8 +324,7 @@ def lint(clang_format):


 def lint_all(clang_format):
-    """Lint files command entry point based on working tree
-    """
+    """Lint files command entry point based on working tree."""
     files = git.get_files_to_check_working_tree(is_interesting_file)

     _lint_files(clang_format, files)
@@ -351,8 +333,7 @@ def lint_all(clang_format):


 def _format_files(clang_format, files):
-    """Format a list of files with clang-format
-    """
+    """Format a list of files with clang-format."""
     clang_format = ClangFormat(clang_format, _get_build_dir())

     format_clean = parallel.parallel_process([os.path.abspath(f) for f in files],
@@ -364,16 +345,15 @@ def _format_files(clang_format, files):


 def format_func(clang_format):
-    """Format files command entry point
-    """
+    """Format files command entry point."""
     files = git.get_files_to_check([], is_interesting_file)

     _format_files(clang_format, files)


-def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reformat):
-    """Reformat a branch made before a clang-format run
-    """
+def reformat_branch(  # pylint: disable=too-many-branches,too-many-locals,too-many-statements
+        clang_format, commit_prior_to_reformat, commit_after_reformat):
+    """Reformat a branch made before a clang-format run."""
     clang_format = ClangFormat(clang_format, _get_build_dir())

     if os.getcwd() != git.get_base_dir():
@@ -515,16 +495,14 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma


 def usage():
-    """Print usage
-    """
+    """Print usage."""
     print(
         "clang-format.py supports 5 commands [ lint, lint-all, lint-patch, format, reformat-branch]."
     )


 def main():
-    """Main entry point
-    """
+    """Execute Main entry point."""
     parser = OptionParser()
     parser.add_option("-c", "--clang-format", type="string", dest="clang_format")

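
Per usage(), the five entry points are driven from the command line; typical invocations (script path assumed from the buildscripts layout):

    python buildscripts/clang_format.py lint
    python buildscripts/clang_format.py format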

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-"""
-Collect system resource information on processes running in Evergreen on a given interval.
-"""
+"""Collect system resource information on processes running in Evergreen on a given interval."""

 from __future__ import absolute_import
 from __future__ import print_function
@@ -19,10 +17,11 @@ import requests
 if __name__ == "__main__" and __package__ is None:
     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-from buildscripts.resmokelib import utils
+from buildscripts.resmokelib import utils  # pylint: disable=wrong-import-position


 def main():
+    """Main."""
     usage = "usage: %prog [options]"
     parser = optparse.OptionParser(description=__doc__, usage=usage)
     parser.add_option("-i", "--interval", dest="interval", default=5, type="int",

@@ -1,7 +1,5 @@
 #!/usr/bin/env python
-"""
-Combines JSON report files used in Evergreen
-"""
+"""Combine JSON report files used in Evergreen."""

 from __future__ import absolute_import
 from __future__ import print_function
@@ -16,20 +14,24 @@ from optparse import OptionParser
 if __name__ == "__main__" and __package__ is None:
     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-from buildscripts.resmokelib.testing import report
-from buildscripts.resmokelib import utils
+from buildscripts.resmokelib.testing import report  # pylint: disable=wrong-import-position
+from buildscripts.resmokelib import utils  # pylint: disable=wrong-import-position


 def read_json_file(json_file):
+    """Read JSON file."""
     with open(json_file) as json_data:
         return json.load(json_data)


 def report_exit(combined_test_report):
-    """The exit code of this script is based on the following:
+    """Return report exit code.
+
+    The exit code of this script is based on the following:
     0: All tests have status "pass", or only non-dynamic tests have status "silentfail".
     31: At least one test has status "fail" or "timeout".
-    Note: A test can be considered dynamic if its name contains a ":" character."""
+    Note: A test can be considered dynamic if its name contains a ":" character.
+    """

     ret = 0
     for test in combined_test_report.test_infos:
@@ -39,9 +41,7 @@ def report_exit(combined_test_report):


 def check_error(input_count, output_count):
-    """
-    Error if both input and output exist, or if neither exist.
-    """
+    """Raise error if both input and output exist, or if neither exist."""
     if (not input_count) and (not output_count):
         raise ValueError("None of the input file(s) or output file exists")

@@ -50,6 +50,7 @@ def check_error(input_count, output_count):


 def main():
+    """Execute Main program."""
     usage = "usage: %prog [options] report1.json report2.json ..."
     parser = OptionParser(description=__doc__, usage=usage)
     parser.add_option("-o", "--output-file", dest="outfile", default="-",
@@ -73,9 +74,9 @@ def main():
         try:
             report_file_json = read_json_file(report_file)
             test_reports.append(report.TestReport.from_dict(report_file_json))
-        except IOError as e:
+        except IOError as err:
             # errno.ENOENT is the error code for "No such file or directory".
-            if e.errno == errno.ENOENT:
+            if err.errno == errno.ENOENT:
                 report_files_count -= 1
                 continue
             raise
@ -1,58 +1,40 @@
|
||||
#!/usr/bin/env python
|
||||
"""Produces a report of all assertions in the MongoDB server codebase.
|
||||
"""Produce a report of all assertions in the MongoDB server codebase.
|
||||
|
||||
Parses .cpp files for assertions and verifies assertion codes are distinct.
|
||||
Optionally replaces zero codes in source code with new distinct values.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import bisect
|
||||
import os
|
||||
import sys
|
||||
import utils
|
||||
from collections import defaultdict, namedtuple
|
||||
from optparse import OptionParser
|
||||
|
||||
from buildscripts import utils
|
||||
|
||||
try:
|
||||
import regex as re
|
||||
except ImportError:
|
||||
print("*** Run 'pip2 install --user regex' to speed up error code checking")
|
||||
import re
|
||||
import re # type: ignore
|
||||
|
||||
ASSERT_NAMES = ["uassert", "massert", "fassert", "fassertFailed"]
|
||||
MINIMUM_CODE = 10000
|
||||
|
||||
codes = []
|
||||
# pylint: disable=invalid-name
|
||||
codes = [] # type: ignore
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
# Each AssertLocation identifies the C++ source location of an assertion
|
||||
AssertLocation = namedtuple("AssertLocation", ['sourceFile', 'byteOffset', 'lines', 'code'])
|
||||
|
||||
list_files = False
|
||||
list_files = False # pylint: disable=invalid-name
|
||||
|
||||
|
||||
# Of historical interest only
|
||||
def assignErrorCodes():
|
||||
cur = MINIMUM_CODE
|
||||
for root in ASSERT_NAMES:
|
||||
for x in utils.getAllSourceFiles():
|
||||
print(x)
|
||||
didAnything = False
|
||||
fixed = ""
|
||||
for line in open(x):
|
||||
s = line.partition(root + "(")
|
||||
if s[1] == "" or line.startswith("#define " + root):
|
||||
fixed += line
|
||||
continue
|
||||
fixed += s[0] + root + "( " + str(cur) + " , " + s[2]
|
||||
cur = cur + 1
|
||||
didAnything = True
|
||||
if didAnything:
|
||||
out = open(x, 'w')
|
||||
out.write(fixed)
|
||||
out.close()
|
||||
|
||||
|
||||
def parseSourceFiles(callback):
|
||||
"""Walks MongoDB sourcefiles and invokes callback for each AssertLocation found."""
|
||||
def parse_source_files(callback):
|
||||
"""Walk MongoDB sourcefiles and invoke a callback for each AssertLocation found."""
|
||||
|
||||
quick = ["assert", "Exception", "ErrorCodes::Error"]
|
||||
|
||||
@ -64,12 +46,12 @@ def parseSourceFiles(callback):
|
||||
re.compile(r"ErrorCodes::Error\s*[({]\s*(\d+)", re.MULTILINE)
|
||||
]
|
||||
|
||||
for sourceFile in utils.getAllSourceFiles(prefix='src/mongo/'):
|
||||
for source_file in utils.get_all_source_files(prefix='src/mongo/'):
|
||||
if list_files:
|
||||
print 'scanning file: ' + sourceFile
|
||||
print('scanning file: ' + source_file)
|
||||
|
||||
with open(sourceFile) as f:
|
||||
text = f.read()
|
||||
with open(source_file) as fh:
|
||||
text = fh.read()
|
||||
|
||||
if not any([zz in text for zz in quick]):
|
||||
continue
|
||||
@ -78,22 +60,24 @@ def parseSourceFiles(callback):
|
||||
for matchiter in matchiters:
|
||||
for match in matchiter:
|
||||
code = match.group(1)
|
||||
codeOffset = match.start(1)
|
||||
code_offset = match.start(1)
|
||||
|
||||
# Note that this will include the text of the full match but will report the
|
||||
# position of the beginning of the code portion rather than the beginning of the
|
||||
# match. This is to position editors on the spot that needs to change.
|
||||
thisLoc = AssertLocation(sourceFile, codeOffset,
|
||||
text[match.start():match.end()], code)
|
||||
this_loc = AssertLocation(source_file, code_offset,
|
||||
text[match.start():match.end()], code)
|
||||
|
||||
callback(thisLoc)
|
||||
callback(this_loc)
|
||||
|
||||
|
||||
# Converts an absolute position in a file into a line number.
|
||||
def getLineAndColumnForPosition(loc, _file_cache={}):
|
||||
def get_line_and_column_for_position(loc, _file_cache=None):
|
||||
"""Convert an absolute position in a file into a line number."""
|
||||
if _file_cache is None:
|
||||
_file_cache = {}
|
||||
if loc.sourceFile not in _file_cache:
|
||||
with open(loc.sourceFile) as f:
|
||||
text = f.read()
|
||||
with open(loc.sourceFile) as fh:
|
||||
text = fh.read()
|
||||
line_offsets = [0]
|
||||
for line in text.splitlines(True):
|
||||
line_offsets.append(line_offsets[-1] + len(line))
|
||||
@ -105,150 +89,115 @@ def getLineAndColumnForPosition(loc, _file_cache={}):
|
||||
return (line, column)
|
||||
|
||||
|
||||
def isTerminated(lines):
|
||||
"""Given .cpp/.h source lines as text, determine if assert is terminated."""
|
||||
x = " ".join(lines)
|
||||
return ';' in x \
|
||||
or x.count('(') - x.count(')') <= 0
|
||||
def is_terminated(lines):
|
||||
"""Determine if assert is terminated, from .cpp/.h source lines as text."""
|
||||
code_block = " ".join(lines)
|
||||
return ';' in code_block or code_block.count('(') - code_block.count(')') <= 0
|
||||
|
||||
|
||||
def getNextCode():
|
||||
"""Finds next unused assertion code.
|
||||
def get_next_code():
|
||||
"""Find next unused assertion code.
|
||||
|
||||
Called by: SConstruct and main()
|
||||
Since SConstruct calls us, codes[] must be global OR WE REPARSE EVERYTHING
|
||||
"""
|
||||
if not len(codes) > 0:
|
||||
readErrorCodes()
|
||||
if not codes:
|
||||
read_error_codes()
|
||||
|
||||
highest = reduce(lambda x, y: max(int(x), int(y)), (loc.code for loc in codes))
|
||||
return highest + 1
|
||||
|
||||
|
||||
def checkErrorCodes():
|
||||
"""SConstruct expects a boolean response from this function.
|
||||
"""
|
||||
(codes, errors) = readErrorCodes()
|
||||
def check_error_codes():
|
||||
"""Check error codes as SConstruct expects a boolean response from this function."""
|
||||
(_, errors) = read_error_codes()
|
||||
return len(errors) == 0
|
||||
|
||||
|
||||
def readErrorCodes():
|
||||
"""Defines callback, calls parseSourceFiles() with callback,
|
||||
and saves matches to global codes list.
|
||||
"""
|
||||
def read_error_codes():
|
||||
"""Define callback, call parse_source_files() with callback, save matches to global codes list."""
|
||||
seen = {}
|
||||
errors = []
|
||||
dups = defaultdict(list)
|
||||
|
||||
# define callback
|
||||
def checkDups(assertLoc):
|
||||
codes.append(assertLoc)
|
||||
code = assertLoc.code
|
||||
def check_dups(assert_loc):
|
||||
"""Check for duplicates."""
|
||||
codes.append(assert_loc)
|
||||
code = assert_loc.code
|
||||
|
||||
if not code in seen:
|
||||
seen[code] = assertLoc
|
||||
seen[code] = assert_loc
|
||||
else:
|
||||
if not code in dups:
|
||||
# on first duplicate, add original to dups, errors
|
||||
dups[code].append(seen[code])
|
||||
errors.append(seen[code])
|
||||
|
||||
dups[code].append(assertLoc)
|
||||
errors.append(assertLoc)
|
||||
dups[code].append(assert_loc)
|
||||
errors.append(assert_loc)
|
||||
|
||||
parseSourceFiles(checkDups)
|
||||
parse_source_files(check_dups)
|
||||
|
||||
if seen.has_key("0"):
|
||||
if "0" in seen:
|
||||
code = "0"
|
||||
bad = seen[code]
|
||||
errors.append(bad)
|
||||
line, col = getLineAndColumnForPosition(bad)
|
||||
line, col = get_line_and_column_for_position(bad)
|
||||
print("ZERO_CODE:")
|
||||
print(" %s:%d:%d:%s" % (bad.sourceFile, line, col, bad.lines))
|
||||
|
||||
for code, locations in dups.items():
|
||||
print("DUPLICATE IDS: %s" % code)
|
||||
for loc in locations:
|
||||
line, col = getLineAndColumnForPosition(loc)
|
||||
line, col = get_line_and_column_for_position(loc)
|
||||
print(" %s:%d:%d:%s" % (loc.sourceFile, line, col, loc.lines))
|
||||
|
||||
return (codes, errors)
|
||||
|
||||
|
||||
def replaceBadCodes(errors, nextCode):
|
||||
"""Modifies C++ source files to replace invalid assertion codes.
|
||||
def replace_bad_codes(errors, next_code): # pylint: disable=too-many-locals
|
||||
"""Modify C++ source files to replace invalid assertion codes.
|
||||
|
||||
For now, we only modify zero codes.
|
||||
|
||||
Args:
|
||||
errors: list of AssertLocation
|
||||
nextCode: int, next non-conflicting assertion code
|
||||
next_code: int, next non-conflicting assertion code
|
||||
"""
|
||||
    zero_errors = [e for e in errors if int(e.code) == 0]
    skip_errors = [e for e in errors if int(e.code) != 0]

    for loc in skip_errors:
        line, col = getLineAndColumnForPosition(loc)
        line, col = get_line_and_column_for_position(loc)
        print("SKIPPING NONZERO code=%s: %s:%d:%d" % (loc.code, loc.sourceFile, line, col))

    # Dedupe, sort, and reverse so we don't have to update offsets as we go.
    for assertLoc in reversed(sorted(set(zero_errors))):
        (sourceFile, byteOffset, lines, code) = assertLoc
        lineNum, _ = getLineAndColumnForPosition(assertLoc)
        print "UPDATING_FILE: %s:%s" % (sourceFile, lineNum)
    for assert_loc in reversed(sorted(set(zero_errors))):
        (source_file, byte_offset, _, _) = assert_loc
        line_num, _ = get_line_and_column_for_position(assert_loc)
        print("UPDATING_FILE: %s:%s" % (source_file, line_num))

        ln = lineNum - 1
        ln = line_num - 1

        with open(sourceFile, 'r+') as f:
            print "LINE_%d_BEFORE:%s" % (lineNum, f.readlines()[ln].rstrip())
        with open(source_file, 'r+') as fh:
            print("LINE_%d_BEFORE:%s" % (line_num, fh.readlines()[ln].rstrip()))

            f.seek(0)
            text = f.read()
            assert text[byteOffset] == '0'
            f.seek(0)
            f.write(text[:byteOffset])
            f.write(str(nextCode))
            f.write(text[byteOffset + 1:])
            f.seek(0)
            fh.seek(0)
            text = fh.read()
            assert text[byte_offset] == '0'
            fh.seek(0)
            fh.write(text[:byte_offset])
            fh.write(str(next_code))
            fh.write(text[byte_offset + 1:])
            fh.seek(0)

            print "LINE_%d_AFTER :%s" % (lineNum, f.readlines()[ln].rstrip())
        nextCode += 1


def getBestMessage(lines, codeStr):
    """Extracts message from one AssertionLocation.lines entry

    Args:
        lines: list of contiguous C++ source lines
        codeStr: assertion code found in first line
    """
    line = lines if isinstance(lines, str) else " ".join(lines)

    err = line.partition(codeStr)[2]
    if not err:
        return ""

    # Trim to outer quotes
    m = re.search(r'"(.*)"', err)
    if not m:
        return ""
    err = m.group(1)

    # Trim inner quote pairs
    err = re.sub(r'" +"', '', err)
    err = re.sub(r'" *<< *"', '', err)
    err = re.sub(r'" *<<[^<]+<< *"', '<X>', err)
    err = re.sub(r'" *\+[^+]+\+ *"', '<X>', err)

    # Trim escaped quotes
    err = re.sub(r'\\"', '', err)

    # Iff doublequote still present, trim that and any trailing text
    err = re.sub(r'".*$', '', err)

    return err.strip()
            print("LINE_%d_AFTER :%s" % (line_num, fh.readlines()[ln].rstrip()))
        next_code += 1


def main():
    """Main."""
    parser = OptionParser(description=__doc__.strip())
    parser.add_option("--fix", dest="replace", action="store_true", default=False,
                      help="Fix zero codes in source files [default: %default]")
@ -256,28 +205,28 @@ def main():
                      help="Suppress output on success [default: %default]")
    parser.add_option("--list-files", dest="list_files", action="store_true", default=False,
                      help="Print the name of each file as it is scanned [default: %default]")
    (options, args) = parser.parse_args()
    (options, _) = parser.parse_args()

    global list_files
    global list_files  # pylint: disable=global-statement,invalid-name
    list_files = options.list_files

    (codes, errors) = readErrorCodes()
    (_, errors) = read_error_codes()
    ok = len(errors) == 0

    if ok and options.quiet:
        return

    next = getNextCode()
    next_code = get_next_code()

    print("ok: %s" % ok)
    print("next: %s" % next)
    print("next: %s" % next_code)

    if ok:
        sys.exit(0)
    elif options.replace:
        replaceBadCodes(errors, next)
        replace_bad_codes(errors, next_code)
    else:
        print ERROR_HELP
        print(ERROR_HELP)
        sys.exit(1)
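The fixup loop above patches each zero error code in place, working purely on byte offsets. A minimal standalone sketch of the same technique, with a hypothetical (file, offset) list standing in for the real AssertionLocation entries:

    def replace_zero_codes(locations, next_code):
        # Dedupe and walk offsets in reverse so writes do not shift
        # the offsets we have not patched yet.
        for source_file, byte_offset in sorted(set(locations), reverse=True):
            with open(source_file, 'r+') as fh:
                text = fh.read()
                assert text[byte_offset] == '0'
                fh.seek(0)
                fh.write(text[:byte_offset])
                fh.write(str(next_code))
                fh.write(text[byte_offset + 1:])
            next_code += 1
        return next_code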
@ -1,15 +1,17 @@
#!/usr/bin/env python
"""ESLint module.

Will download a prebuilt ESLint binary if necessary (i.e. it isn't installed, isn't in the current
path, or is the wrong version). It works in much the same way as clang_format.py. In lint mode, it
will lint the files or directory paths passed. In lint-patch mode, for upload.py, it will see if
there are any candidate files in the supplied patch. Fix mode will run ESLint with the --fix
option, and that will update the files with missing semicolons and similar repairable issues.
There is also a -d mode that assumes you only want to run one copy of ESLint per file / directory
parameter supplied. This lets ESLint search for candidate files to lint.
"""
eslint.py
Will download a prebuilt ESLint binary if necessary (i.e. it isn't installed, isn't in the current
path, or is the wrong version). It works in much the same way as clang_format.py. In lint mode, it
will lint the files or directory paths passed. In lint-patch mode, for upload.py, it will see if
there are any candidate files in the supplied patch. Fix mode will run ESLint with the --fix
option, and that will update the files with missing semicolons and similar repairable issues.
There is also a -d mode that assumes you only want to run one copy of ESLint per file / directory
parameter supplied. This lets ESLint search for candidate files to lint.
"""
import itertools

from __future__ import print_function

import os
import shutil
import string
@ -19,17 +21,17 @@ import tarfile
import tempfile
import threading
import urllib
from distutils import spawn
from distutils import spawn  # pylint: disable=no-name-in-module
from optparse import OptionParser

# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(os.path.realpath(__file__)))))

from buildscripts.resmokelib.utils import globstar
from buildscripts.resmokelib.utils import globstar  # pylint: disable=wrong-import-position

from buildscripts.linter import git
from buildscripts.linter import parallel
from buildscripts.linter import git  # pylint: disable=wrong-import-position
from buildscripts.linter import parallel  # pylint: disable=wrong-import-position

##############################################################################
#
@ -54,12 +56,12 @@ ESLINT_SOURCE_TAR_BASE = string.Template(ESLINT_PROGNAME + "-$platform-$arch")


def callo(args):
    """Call a program, and capture its output
    """
    """Call a program, and capture its output."""
    return subprocess.check_output(args)


def extract_eslint(tar_path, target_file):
    """Extract ESLint tar file."""
    tarfp = tarfile.open(tar_path)
    for name in tarfp.getnames():
        if name == target_file:
@ -68,8 +70,7 @@ def extract_eslint(tar_path, target_file):


def get_eslint_from_cache(dest_file, platform, arch):
    """Get ESLint binary from mongodb's cache
    """
    """Get ESLint binary from mongodb's cache."""
    # Get URL
    if platform == "Linux":
        url = ESLINT_HTTP_LINUX_CACHE
@ -91,10 +92,10 @@ def get_eslint_from_cache(dest_file, platform, arch):


class ESLint(object):
    """Class encapsulates finding a suitable copy of ESLint, and linting an individual file
    """
    """Class encapsulates finding a suitable copy of ESLint, and linting an individual file."""

    def __init__(self, path, cache_dir):
    def __init__(self, path, cache_dir):  # pylint: disable=too-many-branches
        """Initialize ESLint."""
        eslint_progname = ESLINT_PROGNAME

        # Initialize ESLint configuration information
@ -150,8 +151,7 @@ class ESLint(object):
        self.print_lock = threading.Lock()

    def _validate_version(self, warn=False):
        """Validate ESLint is the expected version
        """
        """Validate ESLint is the expected version."""
        esl_version = callo([self.path, "--version"]).rstrip()
        # Ignore the leading v in the version string.
        if ESLINT_VERSION == esl_version[1:]:
@ -163,52 +163,43 @@ class ESLint(object):
            return False

    def _lint(self, file_name, print_diff):
        """Check the specified file for linting errors
        """
        """Check the specified file for linting errors."""
        # ESLint returns non-zero on a linting error. That's all we care about
        # so only enter the printing logic if we have an error.
        try:
            eslint_output = callo([self.path, "-f", "unix", file_name])
        except subprocess.CalledProcessError as e:
            callo([self.path, "-f", "unix", file_name])
        except subprocess.CalledProcessError as err:
            if print_diff:
                # Take a lock to ensure error messages do not get mixed when printed to the screen
                with self.print_lock:
                    print("ERROR: ESLint found errors in " + file_name)
                    print(e.output)
            return False
        except:
            print("ERROR: ESLint process threw unexpected error", sys.exc_info()[0])
                    print(err.output)
            return False

        return True

    def lint(self, file_name):
        """Check the specified file has no linting errors
        """
        """Check the specified file has no linting errors."""
        return self._lint(file_name, print_diff=True)

    def autofix(self, file_name):
        """ Run ESLint in fix mode.
        """
        """Run ESLint in fix mode."""
        return not subprocess.call([self.path, "--fix", file_name])


def is_interesting_file(file_name):
    """"Return true if this file should be checked
    """
    """Return true if this file should be checked."""
    return ((file_name.startswith("src/mongo") or file_name.startswith("jstests"))
            and file_name.endswith(".js"))


def _get_build_dir():
    """Get the location of the scons build directory in case we need to download ESLint
    """
    """Get the location of the scons build directory in case we need to download ESLint."""
    return os.path.join(git.get_base_dir(), "build")


def _lint_files(eslint, files):
    """Lint a list of files with ESLint
    """
    """Lint a list of files with ESLint."""
    eslint = ESLint(eslint, _get_build_dir())

    lint_clean = parallel.parallel_process([os.path.abspath(f) for f in files], eslint.lint)
@ -222,8 +213,7 @@ def _lint_files(eslint, files):


def lint_patch(eslint, infile):
    """Lint patch command entry point
    """
    """Lint patch command entry point."""
    files = git.get_files_to_check_from_patch(infile, is_interesting_file)

    # Patch may have files that we do not want to check which is fine
@ -233,12 +223,11 @@ def lint_patch(eslint, infile):


def lint(eslint, dirmode, glob):
    """Lint files command entry point
    """
    """Lint files command entry point."""
    if dirmode and glob:
        files = glob
    else:
        files = get_files_to_check(glob, is_interesting_file)
        files = git.get_files_to_check(glob, is_interesting_file)

    _lint_files(eslint, files)

@ -246,8 +235,7 @@ def lint(eslint, dirmode, glob):


def _autofix_files(eslint, files):
    """Auto-fix the specified files with ESLint.
    """
    """Auto-fix the specified files with ESLint."""
    eslint = ESLint(eslint, _get_build_dir())

    autofix_clean = parallel.parallel_process([os.path.abspath(f) for f in files], eslint.autofix)
@ -255,22 +243,21 @@ def _autofix_files(eslint, files):
    if not autofix_clean:
        print("ERROR: failed to auto-fix files")
        return False
    return True


def autofix_func(eslint, dirmode, glob):
    """Auto-fix files command entry point
    """
    """Auto-fix files command entry point."""
    if dirmode:
        files = glob
    else:
        files = get_files_to_check(glob, is_interesting_file)
        files = git.get_files_to_check(glob, is_interesting_file)

    return _autofix_files(eslint, files)


def main():
    """Main entry point
    """
    """Execute Main entry point."""
    success = False
    usage = "%prog [-e <eslint>] [-d] lint|lint-patch|fix [glob patterns] "
    description = "lint runs ESLint on provided patterns or all .js files under jstests/ "\
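The ESLint._lint wrapper above keys everything off the linter's exit status, since ESLint exits non-zero when it finds problems. A condensed sketch of that pattern, with a generic linter path standing in for the resolved ESLint binary:

    import subprocess

    def lint_file(linter_path, file_name):
        # check_output raises CalledProcessError on a non-zero exit,
        # which is how a linter conventionally reports findings.
        try:
            subprocess.check_output([linter_path, "-f", "unix", file_name])
        except subprocess.CalledProcessError as err:
            print("ERROR: lint errors in " + file_name)
            print(err.output)
            return False
        return True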
@ -1,7 +1,5 @@
#!/usr/bin/env python
"""
Command line utility for executing MongoDB tests in Evergreen.
"""
"""Command line utility for executing MongoDB tests in Evergreen."""

from __future__ import absolute_import

@ -13,34 +11,42 @@ import sys
if __name__ == "__main__" and __package__ is None:
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from buildscripts import resmoke
from buildscripts import resmokelib
from buildscripts import resmoke  # pylint: disable=wrong-import-position
from buildscripts import resmokelib  # pylint: disable=wrong-import-position

_TagInfo = collections.namedtuple("_TagInfo", ["tag_name", "evergreen_aware", "suite_options"])


class Main(resmoke.Main):
    """
    """Execute Main class.

    A class for executing potentially multiple resmoke.py test suites in a way that handles
    additional options for running unreliable tests in Evergreen.
    """

    UNRELIABLE_TAG = _TagInfo(tag_name="unreliable", evergreen_aware=True,
                              suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
                                  report_failure_status="silentfail"))
    UNRELIABLE_TAG = _TagInfo(
        tag_name="unreliable",
        evergreen_aware=True,
        suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(  # type: ignore
            report_failure_status="silentfail"))

    RESOURCE_INTENSIVE_TAG = _TagInfo(
        tag_name="resource_intensive", evergreen_aware=False,
        suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(num_jobs=1))
        tag_name="resource_intensive",
        evergreen_aware=False,
        suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(  # type: ignore
            num_jobs=1))

    RETRY_ON_FAILURE_TAG = _TagInfo(
        tag_name="retry_on_failure", evergreen_aware=True,
        suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(
        tag_name="retry_on_failure",
        evergreen_aware=True,
        suite_options=resmokelib.config.SuiteOptions.ALL_INHERITED._replace(  # type: ignore
            fail_fast=False, num_repeats=2, report_failure_status="silentfail"))

    def _make_evergreen_aware_tags(self, tag_name):
        """
        Returns a list of resmoke.py tags for task, variant, and distro combinations in Evergreen.
    @staticmethod
    def _make_evergreen_aware_tags(tag_name):
        """Return a list of resmoke.py tags.

        This list is for task, variant, and distro combinations in Evergreen.
        """

        tags_format = ["{tag_name}"]
@ -62,9 +68,10 @@ class Main(resmoke.Main):

    @classmethod
    def _make_tag_combinations(cls):
        """
        Returns a list of (tag, enabled) pairs representing all possible combinations of all
        possible pairings of whether the tags are enabled or disabled together.
        """Return a list of (tag, enabled) pairs.

        These pairs represent all possible combinations of all possible pairings
        of whether the tags are enabled or disabled together.
        """

        combinations = []
@ -96,8 +103,7 @@ class Main(resmoke.Main):
        return combinations

    def _get_suites(self):
        """
        Returns a list of resmokelib.testing.suite.Suite instances to execute.
        """Return a list of resmokelib.testing.suite.Suite instances to execute.

        For every resmokelib.testing.suite.Suite instance returned by resmoke.Main._get_suites(),
        multiple copies of that test suite are run using different resmokelib.config.SuiteOptions()
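The tag table above leans on namedtuple's _replace, which builds a copy of an immutable tuple with selected fields overridden. A self-contained illustration with a toy SuiteOptions, not the resmokelib class:

    import collections

    SuiteOptions = collections.namedtuple(
        "SuiteOptions", ["fail_fast", "num_jobs", "report_failure_status"])

    DEFAULTS = SuiteOptions(fail_fast=True, num_jobs=4, report_failure_status="fail")

    # _replace() returns a new tuple; DEFAULTS itself is unchanged.
    unreliable = DEFAULTS._replace(report_failure_status="silentfail")
    print(unreliable.report_failure_status)  # silentfail
    print(DEFAULTS.report_failure_status)    # fail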
@ -1,6 +1,7 @@
#!/usr/bin/env python
"""Script to retrieve the etc/test_lifecycle.yml tag file from the metadata repository that
corresponds to the current repository.
"""Retrieve the etc/test_lifecycle.yml tag file from the metadata repository.

This is performed for the current repository.

Usage:
    python buildscripts/fetch_test_lifecycle.py evergreen-project revision
@ -13,7 +14,6 @@ import logging
import optparse
import os
import posixpath
import shutil
import sys
import textwrap

@ -23,7 +23,7 @@ import yaml
if __name__ == "__main__" and __package__ is None:
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from buildscripts import git
from buildscripts import git  # pylint: disable=wrong-import-position

LOGGER = logging.getLogger(__name__)

@ -31,8 +31,9 @@ LOGGER = logging.getLogger(__name__)
class MetadataRepository(object):
    """Represent the metadata repository containing the test lifecycle tags file."""

    def __init__(self, repository, references_file, lifecycle_file):
        """Initlialize the MetadataRepository.
    def __init__(self, repository, references_file,
                 lifecycle_file):  # noqa: D214,D405,D406,D407,D411,D413
        """Initialize the MetadataRepository.

        Args:
            repository: the git.Repository object for the repository.
@ -47,13 +48,13 @@ class MetadataRepository(object):
        # The path to the lifecycle file, absolute or relative to the current working directory.
        self.lifecycle_path = os.path.join(repository.directory, lifecycle_file)

    def list_revisions(self):
    def list_revisions(self):  # noqa: D406,D407,D413
        """List the revisions from the HEAD of this repository.

        Returns:
            A list of str containing the git hashes for all the revisions from the newest (HEAD)
            to the oldest.
        """
        return self._repository.git_rev_list(["HEAD", "--", self._lifecycle_file]).splitlines()

    def _get_references_content(self, revision):
@ -62,13 +63,15 @@ class MetadataRepository(object):
        return references_content

    def get_reference(self, metadata_revision, project):
        """Retrieve the reference revision (a revision of the project 'project') associated with
        the test lifecycle file present in the metadata repository at revision 'metadata_revision'.
        """Retrieve the reference revision (a revision of the project 'project').

        Args:
            metadata_revision: a revision (git hash) of this repository.
            project: an Evergreen project name (e.g. mongodb-mongo-master).
        """
        The revision is associated with the test lifecycle file present in the metadata repository
        at revision 'metadata_revision'.

        Args:
            metadata_revision: a revision (git hash) of this repository.
            project: an Evergreen project name (e.g. mongodb-mongo-master).
        """
        references_content = self._get_references_content(metadata_revision)
        references = yaml.safe_load(references_content)
        return references.get("test-lifecycle", {}).get(project)
@ -100,9 +103,9 @@ def _get_metadata_revision(metadata_repo, mongo_repo, project, revision):
    return None


def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, project, revision):
    """Fetch the test lifecycle file that corresponds to the given revision of the repository this
    script is called from.
def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, project,
                         revision):  # noqa: D214,D405,D406,D407,D411,D413
    """Fetch the test lifecycle file for the revision in the repository this script is invoked from.

    Args:
        metadata_repo_url: the git repository URL for the metadata repository containing the test
@ -127,7 +130,8 @@ def fetch_test_lifecycle(metadata_repo_url, references_file, lifecycle_file, pro


def main():
    """
    """Execute Main program.

    Utility to fetch the etc/test_lifecycle.yml file corresponding to a given revision from
    the mongo-test-metadata repository.
    """
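get_reference above reduces to loading the references file as YAML and walking two dictionary levels, with .get() guarding against a missing project. A hedged sketch, where the YAML snippet and revision value are invented stand-ins for the real references file:

    import yaml

    REFERENCES_YAML = """
    test-lifecycle:
      mongodb-mongo-master: abc123placeholder
    """

    references = yaml.safe_load(REFERENCES_YAML)
    print(references.get("test-lifecycle", {}).get("mongodb-mongo-master"))  # abc123placeholder
    print(references.get("test-lifecycle", {}).get("unknown-project"))       # None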
@ -1,12 +1,12 @@
"""GDB commands for MongoDB
"""
"""GDB commands for MongoDB."""
from __future__ import print_function

import gdb
import os
import re
import sys

import gdb


def get_process_name():
    """Return the main binary we are attached to."""
@ -16,7 +16,7 @@ def get_process_name():


def get_thread_id():
    """Returns the thread_id of the current GDB thread"""
    """Return the thread_id of the current GDB thread."""
    # GDB thread example:
    #  RHEL
    #   [Current thread is 1 (Thread 0x7f072426cca0 (LWP 12867))]
@ -41,25 +41,36 @@ def get_thread_id():
# Commands
#
###################################################################################################
# Dictionary of commands so we can write a help function that describes the MongoDB commands.
mongo_commands = {}


def register_mongo_command(obj, name, command_class):
    """Register a command with no completer as a mongo command"""
    global mongo_commands
    gdb.Command.__init__(obj, name, command_class)
class RegisterMongoCommand(object):
    """Class to register mongo commands with GDB."""

    mongo_commands[name] = obj.__doc__
    _MONGO_COMMANDS = {}  # type: ignore

    @classmethod
    def register(cls, obj, name, command_class):
        """Register a command with no completer as a mongo command."""
        gdb.Command.__init__(obj, name, command_class)
        cls._MONGO_COMMANDS[name] = obj.__doc__

    @classmethod
    def print_commands(cls):
        """Print the registered mongo commands."""
        print("Command - Description")
        for key in cls._MONGO_COMMANDS:
            print("%s - %s" % (key, cls._MONGO_COMMANDS[key]))


class DumpGlobalServiceContext(gdb.Command):
    """Dump the Global Service Context"""
    """Dump the Global Service Context."""

    def __init__(self):
        register_mongo_command(self, "mongodb-service-context", gdb.COMMAND_DATA)
        """Initialize DumpGlobalServiceContext."""
        RegisterMongoCommand.register(self, "mongodb-service-context", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
    def invoke(self, arg, _from_tty):  # pylint: disable=no-self-use,unused-argument
        """Invoke GDB command to print the Global Service Context."""
        gdb.execute("print *('mongo::(anonymous namespace)::globalServiceContext')")


@ -68,12 +79,14 @@ DumpGlobalServiceContext()


class MongoDBDumpLocks(gdb.Command):
    """Dump locks in mongod process"""
    """Dump locks in mongod process."""

    def __init__(self):
        register_mongo_command(self, "mongodb-dump-locks", gdb.COMMAND_DATA)
        """Initialize MongoDBDumpLocks."""
        RegisterMongoCommand.register(self, "mongodb-dump-locks", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
    def invoke(self, arg, _from_tty):  # pylint: disable=unused-argument
        """Invoke MongoDBDumpLocks."""
        print("Running Hang Analyzer Supplement - MongoDBDumpLocks")

        main_binary_name = get_process_name()
@ -82,8 +95,9 @@ class MongoDBDumpLocks(gdb.Command):
        else:
            print("Not invoking mongod lock dump for: %s" % (main_binary_name))

    def dump_mongod_locks(self):
        """GDB in-process python supplement"""
    @staticmethod
    def dump_mongod_locks():
        """GDB in-process python supplement."""

        try:
            # Call into mongod, and dump the state of lock manager
@ -99,12 +113,14 @@ MongoDBDumpLocks()


class BtIfActive(gdb.Command):
    """Print stack trace or a short message if the current thread is idle"""
    """Print stack trace or a short message if the current thread is idle."""

    def __init__(self):
        register_mongo_command(self, "mongodb-bt-if-active", gdb.COMMAND_DATA)
        """Initialize BtIfActive."""
        RegisterMongoCommand.register(self, "mongodb-bt-if-active", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
    def invoke(self, arg, _from_tty):  # pylint: disable=no-self-use,unused-argument
        """Invoke GDB to print stack trace."""
        try:
            idle_location = gdb.parse_and_eval("mongo::for_debuggers::idleThreadLocation")
        except gdb.error:
@ -121,14 +137,16 @@ BtIfActive()


class MongoDBUniqueStack(gdb.Command):
    """Print unique stack traces of all threads in current process"""
    """Print unique stack traces of all threads in current process."""

    _HEADER_FORMAT = "Thread {gdb_thread_num}: {name} (Thread {pthread} (LWP {lwpid})):"

    def __init__(self):
        register_mongo_command(self, "mongodb-uniqstack", gdb.COMMAND_DATA)
        """Initialize MongoDBUniqueStack."""
        RegisterMongoCommand.register(self, "mongodb-uniqstack", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
        """Invoke GDB to dump stacks."""
        stacks = {}
        if not arg:
            arg = 'bt'  # default to 'bt'
@ -145,7 +163,9 @@ class MongoDBUniqueStack(gdb.Command):
        if current_thread and current_thread.is_valid():
            current_thread.switch()

    def _get_current_thread_name(self):
    @staticmethod
    def _get_current_thread_name():
        """Return the current thread name."""
        fallback_name = '"%s"' % (gdb.selected_thread().name or '')
        try:
            # This goes through the pretty printer for StringData which adds "" around the name.
@ -157,6 +177,7 @@ class MongoDBUniqueStack(gdb.Command):
            return fallback_name

    def _process_thread_stack(self, arg, stacks, thread):
        """Process the thread stack."""
        thread_info = {}  # thread dict to hold per thread data
        thread_info['pthread'] = get_thread_id()
        thread_info['gdb_thread_num'] = thread.num
@ -186,9 +207,9 @@ class MongoDBUniqueStack(gdb.Command):
            except gdb.error as err:
                print("{} {}".format(thread_info['header'], err))
                break
        addrs = tuple(addrs)  # tuples are hashable, lists aren't.
        addrs_tuple = tuple(addrs)  # tuples are hashable, lists aren't.

        unique = stacks.setdefault(addrs, {'threads': []})
        unique = stacks.setdefault(addrs_tuple, {'threads': []})
        unique['threads'].append(thread_info)
        if 'output' not in unique:
            try:
@ -196,8 +217,12 @@ class MongoDBUniqueStack(gdb.Command):
            except gdb.error as err:
                print("{} {}".format(thread_info['header'], err))

    def _dump_unique_stacks(self, stacks):
    @staticmethod
    def _dump_unique_stacks(stacks):
        """Dump the unique stacks."""

        def first_tid(stack):
            """Return the first tid."""
            return stack['threads'][0]['gdb_thread_num']

        for stack in sorted(stacks.values(), key=first_tid, reverse=True):
@ -213,12 +238,14 @@ MongoDBUniqueStack()


class MongoDBJavaScriptStack(gdb.Command):
    """Print the JavaScript stack from a MongoDB process"""
    """Print the JavaScript stack from a MongoDB process."""

    def __init__(self):
        register_mongo_command(self, "mongodb-javascript-stack", gdb.COMMAND_STATUS)
        """Initialize MongoDBJavaScriptStack."""
        RegisterMongoCommand.register(self, "mongodb-javascript-stack", gdb.COMMAND_STATUS)

    def invoke(self, arg, _from_tty):
    def invoke(self, arg, _from_tty):  # pylint: disable=unused-argument
        """Invoke GDB to dump JS stacks."""
        print("Running Print JavaScript Stack Supplement")

        main_binary_name = get_process_name()
@ -227,8 +254,9 @@ class MongoDBJavaScriptStack(gdb.Command):
        else:
            print("No JavaScript stack print done for: %s" % (main_binary_name))

    def javascript_stack(self):
        """GDB in-process python supplement"""
    @staticmethod
    def javascript_stack():
        """GDB in-process python supplement."""

        for thread in gdb.selected_inferior().threads():
            try:
@ -257,15 +285,15 @@ MongoDBJavaScriptStack()


class MongoDBHelp(gdb.Command):
    """Dump list of mongodb commands"""
    """Dump list of mongodb commands."""

    def __init__(self):
        """Initialize MongoDBHelp."""
        gdb.Command.__init__(self, "mongodb-help", gdb.COMMAND_SUPPORT)

    def invoke(self, arg, _from_tty):
        print("Command - Description")
        for key in mongo_commands:
            print("%s - %s" % (key, mongo_commands[key]))
    def invoke(self, arg, _from_tty):  # pylint: disable=no-self-use,unused-argument
        """Register the mongo print commands."""
        RegisterMongoCommand.print_commands()


# Register command
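RegisterMongoCommand replaces the old module-level dict with a class-level registry that maps each command name to its class docstring for help output. The gdb module only exists inside GDB, so this sketch fakes the gdb.Command registration step:

    class CommandRegistry(object):
        """Simplified stand-in for RegisterMongoCommand (no gdb dependency)."""

        _COMMANDS = {}

        @classmethod
        def register(cls, obj, name):
            # The real version also calls gdb.Command.__init__(obj, name, command_class).
            cls._COMMANDS[name] = obj.__doc__

        @classmethod
        def print_commands(cls):
            print("Command - Description")
            for key in cls._COMMANDS:
                print("%s - %s" % (key, cls._COMMANDS[key]))

    class DumpSomething(object):
        """Dump something useful."""

        def __init__(self):
            CommandRegistry.register(self, "dump-something")

    DumpSomething()
    CommandRegistry.print_commands()  # dump-something - Dump something useful.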
@ -1,19 +1,26 @@
"""Mongo lock module."""

from __future__ import print_function

import re
import sys

import gdb
import gdb.printing
import re
import sys
import mongo

if sys.version_info[0] >= 3:
    # GDB only permits converting a gdb.Value instance to its numerical address when using the
    # long() constructor in Python 2 and not when using the int() constructor. We define the
    # 'long' class as an alias for the 'int' class in Python 3 for compatibility.
    long = int
    long = int  # pylint: disable=redefined-builtin,invalid-name


class Thread(object):
    """Thread class."""

    def __init__(self, thread_id, lwpid):
        """Initialize Thread."""
        self.thread_id = thread_id
        self.lwpid = lwpid

@ -29,11 +36,15 @@ class Thread(object):
        return "Thread 0x{:012x} (LWP {})".format(self.thread_id, self.lwpid)

    def key(self):
        """Return thread key."""
        return "Thread 0x{:012x}".format(self.thread_id)


class Lock(object):
    """Lock class."""

    def __init__(self, addr, resource):
        """Initialize Lock."""
        self.addr = addr
        self.resource = resource

@ -49,35 +60,45 @@ class Lock(object):
        return "Lock 0x{:012x} ({})".format(self.addr, self.resource)

    def key(self):
        """Return lock key."""
        return "Lock 0x{:012x}".format(self.addr)


class Graph(object):
    # The Graph is a dict with the following structure:
    #   {'node_key': {'node': {id: val}, 'next_nodes': [node_key_1, ...]}}
    # Example graph:
    # {
    #  'Lock 1': {'node': {1: 'MongoDB lock'}, 'next_nodes': ['Thread 1']},
    #  'Lock 2': {'node': {2: 'MongoDB lock'}, 'next_nodes': ['Thread 2']},
    #  'Thread 1': {'node': {1: 123}, 'next_nodes': ['Lock 2']},
    #  'Thread 2': {'node': {2: 456}, 'next_nodes': ['Lock 1']}
    # }
    """Graph class.

    The Graph is a dict with the following structure:
        {'node_key': {'node': {id: val}, 'next_nodes': [node_key_1, ...]}}
    Example graph:
    {
     'Lock 1': {'node': {1: 'MongoDB lock'}, 'next_nodes': ['Thread 1']},
     'Lock 2': {'node': {2: 'MongoDB lock'}, 'next_nodes': ['Thread 2']},
     'Thread 1': {'node': {1: 123}, 'next_nodes': ['Lock 2']},
     'Thread 2': {'node': {2: 456}, 'next_nodes': ['Lock 1']}
    }
    """

    def __init__(self):
        """Initialize Graph."""
        self.nodes = {}

    def is_empty(self):
        """Return True if graph is empty."""
        return not bool(self.nodes)

    def add_node(self, node):
        """Add node to graph."""
        if not self.find_node(node):
            self.nodes[node.key()] = {'node': node, 'next_nodes': []}

    def find_node(self, node):
        """Find node in graph."""
        if node.key() in self.nodes:
            return self.nodes[node.key()]
        return None

    def find_from_node(self, from_node):
        """Find from node."""
        for node_key in self.nodes:
            node = self.nodes[node_key]
            for next_node in node['next_nodes']:
@ -86,6 +107,7 @@ class Graph(object):
        return None

    def remove_nodes_without_edge(self):
        """Remove nodes without edge."""
        # Rebuild graph by removing any nodes which do not have any incoming or outgoing edges.
        temp_nodes = {}
        for node_key in self.nodes:
@ -95,28 +117,31 @@ class Graph(object):
        self.nodes = temp_nodes

    def add_edge(self, from_node, to_node):
        f = self.find_node(from_node)
        if f is None:
        """Add edge."""
        f_node = self.find_node(from_node)
        if f_node is None:
            self.add_node(from_node)
            f = self.nodes[from_node.key()]
            f_node = self.nodes[from_node.key()]

        t = self.find_node(to_node)
        if t is None:
        t_node = self.find_node(to_node)
        if t_node is None:
            self.add_node(to_node)
            t = self.nodes[to_node.key()]
            t_node = self.nodes[to_node.key()]

        for n in f['next_nodes']:
            if n == to_node.key():
        for n_node in f_node['next_nodes']:
            if n_node == to_node.key():
                return
        self.nodes[from_node.key()]['next_nodes'].append(to_node.key())

    def print(self):
        """Print graph."""
        for node_key in self.nodes:
            print("Node", self.nodes[node_key]['node'])
            for to in self.nodes[node_key]['next_nodes']:
                print(" ->", to)
            for to_node in self.nodes[node_key]['next_nodes']:
                print(" ->", to_node)

    def to_graph(self, nodes=None, message=None):
        """Return the 'to_graph'."""
        sb = []
        sb.append('# Legend:')
        sb.append('# Thread 1 -> Lock 1 indicates Thread 1 is waiting on Lock 1')
@ -136,12 +161,14 @@ class Graph(object):
        sb.append("}")
        return "\n".join(sb)

    def depth_first_search(self, node_key, nodes_visited, nodes_in_cycle=[]):
        """
    def depth_first_search(self, node_key, nodes_visited, nodes_in_cycle=None):
        """Perform depth first search and return the list of nodes in the cycle or None.

        The nodes_visited is a set of nodes which indicates it has been visited.
        The node_in_cycle is a list of nodes in the potential cycle.
        Returns the list of nodes in the cycle or None.
        """
        if nodes_in_cycle is None:
            nodes_in_cycle = []
        nodes_visited.add(node_key)
        nodes_in_cycle.append(node_key)
        for node in self.nodes[node_key]['next_nodes']:
@ -158,9 +185,7 @@ class Graph(object):
        return None

    def detect_cycle(self):
        """
        If a cycle is detected, returns a list of nodes in the cycle or None.
        """
        """If a cycle is detected, returns a list of nodes in the cycle or None."""
        nodes_visited = set()
        for node in self.nodes:
            if node not in nodes_visited:
@ -171,6 +196,7 @@ class Graph(object):


def find_lwpid(thread_dict, search_thread_id):
    """Find lwpid."""
    for (lwpid, thread_id) in thread_dict.items():
        if thread_id == search_thread_id:
            return lwpid
@ -178,6 +204,7 @@ def find_lwpid(thread_dict, search_thread_id):


def find_func_block(block):
    """Find func block."""
    while block:
        if block.function:
            return block
@ -186,6 +213,7 @@ def find_func_block(block):


def find_frame(function_name_pattern):
    """Find frame."""
    frame = gdb.newest_frame()
    while frame:
        block = None
@ -207,6 +235,7 @@ def find_frame(function_name_pattern):


def find_mutex_holder(graph, thread_dict, show):
    """Find mutex holder."""
    frame = find_frame(r'std::mutex::lock\(\)')
    if frame is None:
        return
@ -241,6 +270,7 @@ def find_mutex_holder(graph, thread_dict, show):


def find_lock_manager_holders(graph, thread_dict, show):
    """Find lock manager holders."""
    frame = find_frame(r'mongo::LockerImpl\<.*\>::')
    if not frame:
        return
@ -253,8 +283,8 @@ def find_lock_manager_holders(graph, thread_dict, show):
    lock_head = gdb.parse_and_eval(
        "mongo::getGlobalLockManager()->_getBucket(resId)->findOrInsert(resId)")

    grantedList = lock_head.dereference()["grantedList"]
    lock_request_ptr = grantedList["_front"]
    granted_list = lock_head.dereference()["grantedList"]
    lock_request_ptr = granted_list["_front"]
    while lock_request_ptr:
        lock_request = lock_request_ptr.dereference()
        locker_ptr = lock_request["locker"]
@ -274,6 +304,7 @@ def find_lock_manager_holders(graph, thread_dict, show):


def get_locks(graph, thread_dict, show=False):
    """Get locks."""
    for thread in gdb.selected_inferior().threads():
        try:
            if not thread.is_valid():
@ -285,7 +316,8 @@ def get_locks(graph, thread_dict, show=False):
            print("Ignoring GDB error '%s' in get_locks" % str(err))


def get_threads_info(graph=None):
def get_threads_info():
    """Get threads info."""
    thread_dict = {}
    for thread in gdb.selected_inferior().threads():
        try:
@ -295,7 +327,7 @@ def get_threads_info(graph=None):
            # PTID is a tuple: Process ID (PID), Lightweight Process ID (LWPID), Thread ID (TID)
            (_, lwpid, _) = thread.ptid
            thread_num = thread.num
            thread_id = get_thread_id()
            thread_id = mongo.get_thread_id()
            if not thread_id:
                print("Unable to retrieve thread_info for thread %d" % thread_num)
                continue
@ -307,16 +339,19 @@ def get_threads_info(graph=None):


class MongoDBShowLocks(gdb.Command):
    """Show MongoDB locks & pthread mutexes"""
    """Show MongoDB locks & pthread mutexes."""

    def __init__(self):
        register_mongo_command(self, "mongodb-show-locks", gdb.COMMAND_DATA)
        """Initialize MongoDBShowLocks."""
        mongo.register_mongo_command(self, "mongodb-show-locks", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
    def invoke(self, *_):
        """Invoke mongodb_show_locks."""
        self.mongodb_show_locks()

    def mongodb_show_locks(self):
        """GDB in-process python supplement"""
    @staticmethod
    def mongodb_show_locks():
        """GDB in-process python supplement."""
        try:
            thread_dict = get_threads_info()
            get_locks(graph=None, thread_dict=thread_dict, show=True)
@ -324,24 +359,27 @@ class MongoDBShowLocks(gdb.Command):
            print("Ignoring GDB error '%s' in mongodb_show_locks" % str(err))


MongoDBShowLocks()
mongo.MongoDBShowLocks()  # type: ignore


class MongoDBWaitsForGraph(gdb.Command):
    """Create MongoDB WaitsFor lock graph [graph_file]"""
    """Create MongoDB WaitsFor lock graph [graph_file]."""

    def __init__(self):
        register_mongo_command(self, "mongodb-waitsfor-graph", gdb.COMMAND_DATA)
        """Initialize MongoDBWaitsForGraph."""
        mongo.register_mongo_command(self, "mongodb-waitsfor-graph", gdb.COMMAND_DATA)

    def invoke(self, arg, _from_tty):
    def invoke(self, arg, *_):
        """Invoke mongodb_waitsfor_graph."""
        self.mongodb_waitsfor_graph(arg)

    def mongodb_waitsfor_graph(self, file=None):
        """GDB in-process python supplement"""
    @staticmethod
    def mongodb_waitsfor_graph(graph_file=None):
        """GDB in-process python supplement."""

        graph = Graph()
        try:
            thread_dict = get_threads_info(graph=graph)
            thread_dict = get_threads_info()
            get_locks(graph=graph, thread_dict=thread_dict, show=False)
            graph.remove_nodes_without_edge()
            if graph.is_empty():
@ -351,10 +389,10 @@ class MongoDBWaitsForGraph(gdb.Command):
            cycle_nodes = graph.detect_cycle()
            if cycle_nodes:
                cycle_message = "# Cycle detected in the graph nodes %s" % cycle_nodes
            if file:
                print("Saving digraph to %s" % file)
                with open(file, 'w') as f:
                    f.write(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
            if graph_file:
                print("Saving digraph to %s" % graph_file)
                with open(graph_file, 'w') as fh:
                    fh.write(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
                print(cycle_message.split("# ")[1])
            else:
                print(graph.to_graph(nodes=cycle_nodes, message=cycle_message))
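detect_cycle above is a depth-first search over the node dict; hitting a node that is already on the current path means a cycle. A self-contained sketch over the same {'node_key': {'next_nodes': [...]}} shape (not the module's exact implementation):

    def detect_cycle(nodes):
        """Return a list of node keys forming a cycle, or None."""

        def dfs(node_key, visited, path):
            visited.add(node_key)
            path.append(node_key)
            for nxt in nodes[node_key]['next_nodes']:
                if nxt in path:
                    return path[path.index(nxt):]
                if nxt not in visited:
                    found = dfs(nxt, visited, path)
                    if found:
                        return found
            path.pop()
            return None

        visited = set()
        for node_key in nodes:
            if node_key not in visited:
                cycle = dfs(node_key, visited, [])
                if cycle:
                    return cycle
        return None

    # The deadlock from the Graph docstring: two threads each waiting on
    # the lock the other one holds.
    GRAPH = {
        'Lock 1': {'next_nodes': ['Thread 1']},
        'Lock 2': {'next_nodes': ['Thread 2']},
        'Thread 1': {'next_nodes': ['Lock 2']},
        'Thread 2': {'next_nodes': ['Lock 1']},
    }
    print(detect_cycle(GRAPH))  # ['Lock 1', 'Thread 1', 'Lock 2', 'Thread 2']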
@ -1,24 +1,24 @@
"""GDB Pretty-printers for MongoDB
"""
"""GDB Pretty-printers for MongoDB."""
from __future__ import print_function

import gdb.printing
import struct
import sys

import gdb.printing

try:
    import bson
    import bson.json_util
    import collections
    from bson.codec_options import CodecOptions
except ImportError as e:
except ImportError as err:
    print("Warning: Could not load bson library for Python '" + str(sys.version) + "'.")
    print("Check with the pip command if pymongo 3.x is installed.")
    bson = None


def get_unique_ptr(obj):
    """Read the value of a libstdc++ std::unique_ptr"""
    """Read the value of a libstdc++ std::unique_ptr."""
    return obj["_M_t"]['_M_head_impl']


@ -30,13 +30,14 @@ def get_unique_ptr(obj):


class StatusPrinter(object):
    """Pretty-printer for mongo::Status"""
    OK = 0  # ErrorCodes::OK
    """Pretty-printer for mongo::Status."""

    def __init__(self, val):
        """Initialize StatusPrinter."""
        self.val = val

    def to_string(self):
        """Return status for printing."""
        if not self.val['_error']:
            return 'Status::OK()'

@ -49,13 +50,15 @@ class StatusPrinter(object):
        return 'Status(%s, %s)' % (code, reason)


class StatusWithPrinter:
    """Pretty-printer for mongo::StatusWith<>"""
class StatusWithPrinter(object):
    """Pretty-printer for mongo::StatusWith<>."""

    def __init__(self, val):
        """Initialize StatusWithPrinter."""
        self.val = val

    def to_string(self):
        """Return status for printing."""
        if not self.val['_status']['_error']:
            return 'StatusWith(OK, %s)' % (self.val['_t'])

@ -69,27 +72,31 @@ class StatusWithPrinter:
        return 'StatusWith(%s, %s)' % (code, reason)


class StringDataPrinter:
    """Pretty-printer for mongo::StringData"""
class StringDataPrinter(object):
    """Pretty-printer for mongo::StringData."""

    def __init__(self, val):
        """Initialize StringDataPrinter."""
        self.val = val

    def display_hint(self):
    @staticmethod
    def display_hint():
        """Display hint."""
        return 'string'

    def to_string(self):
        """Return data for printing."""
        size = self.val["_size"]
        if size == -1:
            return self.val['_data'].lazy_string()
        else:
            return self.val['_data'].lazy_string(length=size)
        return self.val['_data'].lazy_string(length=size)


class BSONObjPrinter:
    """Pretty-printer for mongo::BSONObj"""
class BSONObjPrinter(object):
    """Pretty-printer for mongo::BSONObj."""

    def __init__(self, val):
        """Initialize BSONObjPrinter."""
        self.val = val
        self.ptr = self.val['_objdata'].cast(gdb.lookup_type('void').pointer())
        # Handle the endianness of the BSON object size, which is represented as a 32-bit integer
@ -101,10 +108,13 @@ class BSONObjPrinter:
        else:
            self.size = struct.unpack('<I', inferior.read_memory(self.ptr, 4))[0]

    def display_hint(self):
    @staticmethod
    def display_hint():
        """Display hint."""
        return 'map'

    def children(self):
        """Children."""
        # Do not decode a BSONObj with an invalid size.
        if not bson or self.size < 5 or self.size > 17 * 1024 * 1024:
            return
@ -114,11 +124,12 @@ class BSONObjPrinter:
        options = CodecOptions(document_class=collections.OrderedDict)
        bsondoc = bson.BSON.decode(buf, codec_options=options)

        for k, v in bsondoc.items():
            yield 'key', k
            yield 'value', bson.json_util.dumps(v)
        for key, val in bsondoc.items():
            yield 'key', key
            yield 'value', bson.json_util.dumps(val)

    def to_string(self):
        """Return BSONObj for printing."""
        # The value has been optimized out.
        if self.size == -1:
            return "BSONObj @ %s" % (self.ptr)
@ -132,29 +143,33 @@ class BSONObjPrinter:

        if size == 5:
            return "%s empty BSONObj @ %s" % (ownership, self.ptr)
        else:
            return "%s BSONObj %s bytes @ %s" % (ownership, size, self.ptr)
        return "%s BSONObj %s bytes @ %s" % (ownership, size, self.ptr)


class UnorderedFastKeyTablePrinter:
    """Pretty-printer for mongo::UnorderedFastKeyTable<>"""
class UnorderedFastKeyTablePrinter(object):
    """Pretty-printer for mongo::UnorderedFastKeyTable<>."""

    def __init__(self, val):
        """Initialize UnorderedFastKeyTablePrinter."""
        self.val = val

        # Get the value_type by doing a type lookup
        valueTypeName = val.type.strip_typedefs().name + "::value_type"
        valueType = gdb.lookup_type(valueTypeName).target()
        self.valueTypePtr = valueType.pointer()
        value_type_name = val.type.strip_typedefs().name + "::value_type"
        value_type = gdb.lookup_type(value_type_name).target()
        self.value_type_ptr = value_type.pointer()

    def display_hint(self):
    @staticmethod
    def display_hint():
        """Display hint."""
        return 'map'

    def to_string(self):
        """Return UnorderedFastKeyTablePrinter for printing."""
        return "UnorderedFastKeyTablePrinter<%s> with %s elems " % (
            self.val.type.template_argument(0), self.val["_size"])

    def children(self):
        """Children."""
        cap = self.val["_area"]["_hashMask"] + 1
        it = get_unique_ptr(self.val["_area"]["_entries"])
        end = it + cap
@ -168,16 +183,17 @@ class UnorderedFastKeyTablePrinter:
            if not elt['_used']:
                continue

            value = elt['_data']["__data"].cast(self.valueTypePtr).dereference()
            value = elt['_data']["__data"].cast(self.value_type_ptr).dereference()

            yield ('key', value['first'])
            yield ('value', value['second'])


class DecorablePrinter:
    """Pretty-printer for mongo::Decorable<>"""
class DecorablePrinter(object):
    """Pretty-printer for mongo::Decorable<>."""

    def __init__(self, val):
        """Initialize DecorablePrinter."""
        self.val = val

        decl_vector = val["_decorations"]["_registry"]["_decorationInfo"]
@ -187,14 +203,18 @ class DecorablePrinter:
        decinfo_t = gdb.lookup_type('mongo::DecorationRegistry::DecorationInfo')
        self.count = int((int(finish) - int(self.start)) / decinfo_t.sizeof)

    def display_hint(self):
    @staticmethod
    def display_hint():
        """Display hint."""
        return 'map'

    def to_string(self):
        """Return Decorable for printing."""
        return "Decorable<%s> with %s elems " % (self.val.type.template_argument(0), self.count)

    def children(self):
        decorationData = get_unique_ptr(self.val["_decorations"]["_decorationData"])
        """Children."""
        decoration_data = get_unique_ptr(self.val["_decorations"]["_decorationData"])

        for index in range(self.count):
            descriptor = self.start[index]
@ -215,19 +235,20 @@ class DecorablePrinter:

            # Cast the raw char[] into the actual object that is stored there.
            type_t = gdb.lookup_type(type_name)
            obj = decorationData[dindex].cast(type_t)
            obj = decoration_data[dindex].cast(type_t)

            yield ('key', "%d:%s:%s" % (index, obj.address, type_name))
            yield ('value', obj)


def find_match_brackets(search, opening='<', closing='>'):
    """Returns the index of the closing bracket that matches the first opening bracket.
    Returns -1 if no last matching bracket is found, i.e. not a template.
    """Return the index of the closing bracket that matches the first opening bracket.

    Example:
        'Foo<T>::iterator<U>''
        returns 5
    Return -1 if no last matching bracket is found, i.e. not a template.

    Example:
        'Foo<T>::iterator<U>''
        returns 5
    """
    index = search.find(opening)
    if index == -1:
@ -237,11 +258,11 @@ def find_match_brackets(search, opening='<', closing='>'):
    count = 1
    str_len = len(search)
    for index in range(start, str_len):
        c = search[index]
        char = search[index]

        if c == opening:
        if char == opening:
            count += 1
        elif c == closing:
        elif char == closing:
            count -= 1

        if count == 0:
@ -251,9 +272,10 @@ def find_match_brackets(search, opening='<', closing='>'):


class MongoSubPrettyPrinter(gdb.printing.SubPrettyPrinter):
    """Sub pretty printer managed by the pretty-printer collection"""
    """Sub pretty printer managed by the pretty-printer collection."""

    def __init__(self, name, prefix, is_template, printer):
        """Initialize MongoSubPrettyPrinter."""
        super(MongoSubPrettyPrinter, self).__init__(name)
        self.prefix = prefix
        self.printer = printer
@ -262,16 +284,20 @@ class MongoSubPrettyPrinter(gdb.printing.SubPrettyPrinter):

class MongoPrettyPrinterCollection(gdb.printing.PrettyPrinter):
    """MongoDB-specific pretty-printer collection that ignores subtypes.

    It will match 'HashTable<T> but not 'HashTable<T>::iterator' when asked for 'HashTable'.
    """

    def __init__(self):
        """Initialize MongoPrettyPrinterCollection."""
        super(MongoPrettyPrinterCollection, self).__init__("mongo", [])

    def add(self, name, prefix, is_template, printer):
        """Add a subprinter."""
        self.subprinters.append(MongoSubPrettyPrinter(name, prefix, is_template, printer))

    def __call__(self, val):
        """Return matched printer type."""

        # Get the type name.
        lookup_tag = gdb.types.get_basic_type(val.type).tag
@ -286,15 +312,18 @@ class MongoPrettyPrinterCollection(gdb.printing.PrettyPrinter):
        # We do not want HashTable<T>::iterator as an example, just HashTable<T>
        if index == -1 or index + 1 == len(lookup_tag):
            for printer in self.subprinters:
                if printer.enabled and (
                        (printer.is_template and lookup_tag.find(printer.prefix) == 0) or
                        (not printer.is_template and lookup_tag == printer.prefix)):
                    return printer.printer(val)
                if not printer.enabled:
                    continue
                if ((not printer.is_template or lookup_tag.find(printer.prefix) != 0)
                        and (printer.is_template or lookup_tag != printer.prefix)):
                    continue
                return printer.printer(val)

        return None


def build_pretty_printer():
    """Build a pretty printer."""
    pp = MongoPrettyPrinterCollection()
    pp.add('BSONObj', 'mongo::BSONObj', False, BSONObjPrinter)
    pp.add('Decorable', 'mongo::Decorable', True, DecorablePrinter)
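find_match_brackets above is plain bracket counting: find the first opening bracket, then scan forward adjusting a depth counter until it returns to zero. A standalone version consistent with the visible lines:

    def find_match_brackets(search, opening='<', closing='>'):
        """Return the index of the closing bracket matching the first opening one, or -1."""
        index = search.find(opening)
        if index == -1:
            return -1

        count = 1
        for index in range(index + 1, len(search)):
            char = search[index]
            if char == opening:
                count += 1
            elif char == closing:
                count -= 1
            if count == 0:
                return index
        return -1

    print(find_match_brackets('Foo<T>::iterator<U>'))  # 5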
@ -1,12 +1,13 @@
#!/usr/bin/env python
"""
This script generates the compile expansions file used by Evergreen as part of the push/release
process.
Generate the compile expansions file used by Evergreen as part of the push/release process.

Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""

from __future__ import print_function

import argparse
import json
import os
@ -14,7 +15,7 @@ import re
import sys
import yaml

version_json = "version.json"
VERSION_JSON = "version.json"


def generate_expansions():
@ -25,7 +26,7 @@ def generate_expansions():
    """
    args = parse_args()
    expansions = {}
    expansions.update(generate_version_expansions(args))
    expansions.update(generate_version_expansions())
    expansions.update(generate_scons_cache_expansions())

    with open(args.out, "w") as out:
@ -34,18 +35,19 @@ def generate_expansions():


def parse_args():
    """Parse program arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--out", required=True)
    return parser.parse_args()


def generate_version_expansions(args):
def generate_version_expansions():
    """Generate expansions from a version.json file if given, or $MONGO_VERSION."""
    expansions = {}

    if os.path.exists(version_json):
        with open(version_json, "r") as f:
            data = f.read()
    if os.path.exists(VERSION_JSON):
        with open(VERSION_JSON, "r") as fh:
            data = fh.read()
        version_data = json.loads(data)
        version_line = version_data['version']
        version_parts = match_verstr(version_line)
@ -81,8 +83,8 @@ def generate_scons_cache_expansions():
    default_cache_path_base = "/data/scons-cache"

    if os.path.isfile(system_id_path):
        with open(system_id_path, "r") as f:
            default_cache_path = os.path.join(default_cache_path_base, f.readline().strip())
        with open(system_id_path, "r") as fh:
            default_cache_path = os.path.join(default_cache_path_base, fh.readline().strip())

    expansions["scons_cache_path"] = default_cache_path

@ -98,8 +100,7 @@ def generate_scons_cache_expansions():


def match_verstr(verstr):
    """
    This function matches a version string and captures the "extra" part.
    """Match a version string and capture the "extra" part.

    If the version is a release like "2.3.4" or "2.3.4-rc0", this will return
    None. If the version is a pre-release like "2.3.4-325-githash" or
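match_verstr's regex falls outside the hunk, but its documented behavior pins it down: plain releases and release candidates yield None, while pre-releases capture the trailing part after the release number. One plausible implementation consistent with that contract (the exact pattern here is an assumption):

    import re

    def match_verstr(verstr):
        # Releases like "2.3.4" or "2.3.4-rc0" fail to match (None);
        # pre-releases like "2.3.4-325-g1abcdef" capture the extra part.
        return re.match(r'^(\d+\.\d+\.\d+(?:-rc\d+)?)(-\d+-g?[0-9a-f]+)$', verstr)

    print(match_verstr("2.3.4"))                        # None
    print(match_verstr("2.3.4-325-g1abcdef").group(2))  # -325-g1abcdef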
@ -1,12 +1,13 @@
#!/usr/bin/env python
"""
This script generates the compile expansions file used by Evergreen as part of the push/release
process.
Generate the compile expansions file used by Evergreen as part of the push/release process.

Invoke by specifying an output file.
$ python generate_compile_expansions.py --out compile_expansions.yml
"""

from __future__ import print_function

import argparse
import json
import os
@ -14,7 +15,7 @@ import re
import sys
import yaml

version_json = "version.json"
VERSION_JSON = "version.json"


def generate_expansions():
@ -25,7 +26,7 @@ def generate_expansions():
    """
    args = parse_args()
    expansions = {}
    expansions.update(generate_version_expansions(args))
    expansions.update(generate_version_expansions())
    expansions.update(generate_scons_cache_expansions())

    with open(args.out, "w") as out:
@ -34,18 +35,19 @@ def generate_expansions():


def parse_args():
    """Parse program arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--out", required=True)
    return parser.parse_args()


def generate_version_expansions(args):
def generate_version_expansions():
    """Generate expansions from a version.json file if given, or $MONGO_VERSION."""
    expansions = {}

    if os.path.exists(version_json):
        with open(version_json, "r") as f:
            data = f.read()
    if os.path.exists(VERSION_JSON):
        with open(VERSION_JSON, "r") as fh:
            data = fh.read()
        version_data = json.loads(data)
        version_line = version_data['version']
        version_parts = match_verstr(version_line)
@ -84,8 +86,8 @@ def generate_scons_cache_expansions():
    system_id_path = "/etc/mongodb-build-system-id"

    if os.path.isfile(system_id_path):
        with open(system_id_path, "r") as f:
            system_uuid = f.readline().strip()
        with open(system_id_path, "r") as fh:
            system_uuid = fh.readline().strip()

        # Set the scons shared cache setting

@ -124,8 +126,7 @@ def generate_scons_cache_expansions():


def match_verstr(verstr):
    """
    This function matches a version string and captures the "extra" part.
    """Match a version string and capture the "extra" part.

    If the version is a release like "2.3.4" or "2.3.4-rc0", this will return
    None. If the version is a pre-release like "2.3.4-325-githash" or
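The shared-cache variant above keys each host's cache directory off the id stored in /etc/mongodb-build-system-id. A sketch of that lookup; the fallback when the id file is absent is an assumption, since the hunk only shows the happy path:

    import os

    def default_scons_cache_path(system_id_path="/etc/mongodb-build-system-id",
                                 cache_base="/data/scons-cache"):
        if os.path.isfile(system_id_path):
            with open(system_id_path, "r") as fh:
                return os.path.join(cache_base, fh.readline().strip())
        return cache_base  # assumed fallback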
@ -22,17 +22,18 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
import subprocess  # type: ignore
else:
import subprocess

LOGGER = logging.getLogger(__name__)


class Repository(object):
class Repository(object):  # pylint: disable=too-many-public-methods
"""Represent a local git repository."""

def __init__(self, directory):
"""Initialize Repository."""
self.directory = directory

def git_add(self, args):
@ -173,7 +174,8 @@ class Repository(object):
def clone(url, directory, branch=None, depth=None):
"""Clone the repository designed by 'url' into 'directory'.

Return a Repository instance."""
Return a Repository instance.
"""
params = ["git", "clone"]
if branch:
params += ["--branch", branch]
@ -188,7 +190,8 @@ class Repository(object):
def get_base_directory(directory=None):
"""Return the base directory of the repository the given directory belongs to.

If no directory is specified, then the current working directory is used."""
If no directory is specified, then the current working directory is used.
"""
if directory is not None:
params = ["git", "-C", directory]
else:
@ -221,8 +224,7 @@ class Repository(object):
return result.returncode

def _run_cmd(self, cmd, args):
"""Run the git command and return a GitCommandResult instance.
"""
"""Run the git command and return a GitCommandResult instance."""

params = ["git", cmd] + args
return self._run_process(cmd, params, cwd=self.directory)
@ -251,8 +253,9 @@ class GitException(Exception):
stderr: the error output of the git command.
"""

def __init__(self, message, returncode=None, cmd=None, process_args=None, stdout=None,
stderr=None):
def __init__(  # pylint: disable=too-many-arguments
self, message, returncode=None, cmd=None, process_args=None, stdout=None, stderr=None):
"""Initialize GitException."""
Exception.__init__(self, message)
self.returncode = returncode
self.cmd = cmd
@ -272,7 +275,9 @@ class GitCommandResult(object):
stderr: the error output of the command.
"""

def __init__(self, cmd, process_args, returncode, stdout=None, stderr=None):
def __init__(  # pylint: disable=too-many-arguments
self, cmd, process_args, returncode, stdout=None, stderr=None):
"""Initialize GitCommandResult."""
self.cmd = cmd
self.process_args = process_args
self.returncode = returncode

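The docstring edits in git.py follow pydocstyle's multi-line docstring rules, chiefly D209 (closing quotes on their own line) plus the one-line summary ending in a period with a blank line before the body. A minimal sketch of the target shape; the function is hypothetical:

    def describe(url, directory):
        """Summarize a clone request in one line.

        The summary line ends with a period, a blank line separates it from
        the body, and the closing quotes sit on their own line (D209).
        """
        return "%s -> %s" % (url, directory)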
@ -1,5 +1,5 @@
#!/usr/bin/env python
"""Hang Analyzer
"""Hang Analyzer module.

A prototype hang analyzer for Evergreen integration to help investigate test timeouts

@ -25,11 +25,11 @@ import sys
import tempfile
import traceback
import time
from distutils import spawn
from distutils import spawn  # pylint: disable=no-name-in-module
from optparse import OptionParser
_is_windows = (sys.platform == "win32")
_IS_WINDOWS = (sys.platform == "win32")

if _is_windows:
if _IS_WINDOWS:
import win32event
import win32api

@ -39,11 +39,12 @@ if __name__ == "__main__" and __package__ is None:
from buildscripts.resmokelib import core


def call(a, logger):
logger.info(str(a))
def call(args, logger):
"""Call subprocess on args list."""
logger.info(str(args))

# Use a common pipe for stdout & stderr for logging.
process = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
logger_pipe = core.pipe.LoggerPipe(logger, logging.INFO, process.stdout)
logger_pipe.wait_until_started()

@ -51,54 +52,58 @@ def call(a, logger):
logger_pipe.wait_until_finished()

if ret != 0:
logger.error("Bad exit code %d" % (ret))
raise Exception("Bad exit code %d from %s" % (ret, " ".join(a)))
logger.error("Bad exit code %d", ret)
raise Exception("Bad exit code %d from %s" % (ret, " ".join(args)))


def callo(a, logger):
logger.info("%s" % str(a))
def callo(args, logger):
"""Call subprocess on args string."""
logger.info("%s", str(args))

return subprocess.check_output(a)
return subprocess.check_output(args)

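Most of the hang_analyzer.py churn swaps eager `%` string formatting inside logging calls for logging's lazy argument style, which is what pylint's logging-not-lazy check (W1201) asks for: the message is only interpolated if the record is actually emitted. A minimal sketch:

    import logging

    logging.basicConfig(format="%(message)s", level=logging.INFO)
    logger = logging.getLogger(__name__)
    ret = 1

    logger.error("Bad exit code %d" % ret)  # eager: formats even when filtered out
    logger.error("Bad exit code %d", ret)   # lazy: logging formats only on emit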
def find_program(prog, paths):
"""Finds the specified program in env PATH, or tries a set of paths """
"""Find the specified program in env PATH, or tries a set of paths."""
loc = spawn.find_executable(prog)

if loc is not None:
return loc

for loc in paths:
p = os.path.join(loc, prog)
if os.path.exists(p):
return p
full_prog = os.path.join(loc, prog)
if os.path.exists(full_prog):
return full_prog

return None


def get_process_logger(debugger_output, pid, process_name):
"""Returns the process logger from options specified."""
"""Return the process logger from options specified."""
process_logger = logging.Logger("process", level=logging.DEBUG)
process_logger.mongo_process_filename = None

if 'stdout' in debugger_output:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(fmt="%(message)s"))
process_logger.addHandler(handler)
s_handler = logging.StreamHandler(sys.stdout)
s_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
process_logger.addHandler(s_handler)

if 'file' in debugger_output:
filename = "debugger_%s_%d.log" % (os.path.splitext(process_name)[0], pid)
process_logger.mongo_process_filename = filename
handler = logging.FileHandler(filename=filename, mode="w")
handler.setFormatter(logging.Formatter(fmt="%(message)s"))
process_logger.addHandler(handler)
f_handler = logging.FileHandler(filename=filename, mode="w")
f_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
process_logger.addHandler(f_handler)

return process_logger


class WindowsDumper(object):
def __find_debugger(self, logger, debugger):
"""Finds the installed debugger"""
"""WindowsDumper class."""

@staticmethod
def __find_debugger(logger, debugger):
"""Find the installed debugger."""
# We are looking for c:\Program Files (x86)\Windows Kits\8.1\Debuggers\x64
cdb = spawn.find_executable(debugger)
if cdb is not None:
@ -107,26 +112,27 @@ class WindowsDumper(object):

# Cygwin via sshd does not expose the normal environment variables
# Use the shell api to get the variable instead
rootDir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILESX86, None, 0)
root_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILESX86, None, 0)

for i in range(0, 2):
pathToTest = os.path.join(rootDir, "Windows Kits", "8." + str(i), "Debuggers", "x64")
logger.info("Checking for debugger in %s" % pathToTest)
if (os.path.exists(pathToTest)):
return os.path.join(pathToTest, debugger)
for idx in range(0, 2):
dbg_path = os.path.join(root_dir, "Windows Kits", "8." + str(idx), "Debuggers", "x64")
logger.info("Checking for debugger in %s", dbg_path)
if os.path.exists(dbg_path):
return os.path.join(dbg_path, debugger)

return None

def dump_info(self, root_logger, logger, pid, process_name, take_dump):
"""Dump useful information to the console"""
def dump_info(  # pylint: disable=too-many-arguments
self, root_logger, logger, pid, process_name, take_dump):
"""Dump useful information to the console."""
debugger = "cdb.exe"
dbg = self.__find_debugger(root_logger, debugger)

if dbg is None:
root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
root_logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return

root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)

dump_command = ""
if take_dump:
@ -134,7 +140,7 @@ class WindowsDumper(object):
dump_file = "dump_%s.%d.%s" % (os.path.splitext(process_name)[0], pid,
self.get_dump_ext())
dump_command = ".dump /ma %s" % dump_file
root_logger.info("Dumping core to %s" % dump_file)
root_logger.info("Dumping core to %s", dump_file)

cmds = [
".symfix",  # Fixup symbol path
@ -151,48 +157,56 @@ class WindowsDumper(object):

call([dbg, '-c', ";".join(cmds), '-p', str(pid)], logger)

root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
root_logger.info("Done analyzing %s process with PID %d", process_name, pid)

def get_dump_ext(self):
@staticmethod
def get_dump_ext():
"""Return the dump file extension."""
return "mdmp"

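Methods like `__find_debugger` and `get_dump_ext` never touch `self`, so the diff converts them to `@staticmethod`, which satisfies pylint's no-self-use check (R0201) while leaving call sites such as `self.get_dump_ext()` unchanged. A minimal sketch; the class is hypothetical:

    class Dumper(object):
        """Tiny stand-in for the WindowsDumper/GDBDumper pattern."""

        @staticmethod
        def get_dump_ext():
            """Return the dump file extension."""
            return "core"

        def dump_name(self, process_name, pid):
            """Static methods remain reachable through self."""
            return "dump_%s.%d.%s" % (process_name, pid, self.get_dump_ext())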
class WindowsProcessList(object):
def __find_ps(self):
"""Finds tasklist """
"""WindowsProcessList class."""

@staticmethod
def __find_ps():
"""Find tasklist."""
return os.path.join(os.environ["WINDIR"], "system32", "tasklist.exe")

def dump_processes(self, logger):
"""Get list of [Pid, Process Name]"""
"""Get list of [Pid, Process Name]."""
ps = self.__find_ps()

logger.info("Getting list of processes using %s" % ps)
logger.info("Getting list of processes using %s", ps)

ret = callo([ps, "/FO", "CSV"], logger)

b = StringIO.StringIO(ret)
csvReader = csv.reader(b)
buff = StringIO.StringIO(ret)
csv_reader = csv.reader(buff)

p = [[int(row[1]), row[0]] for row in csvReader if row[1] != "PID"]

return p
return [[int(row[1]), row[0]] for row in csv_reader if row[1] != "PID"]


# LLDB dumper is for MacOS X
class LLDBDumper(object):
def __find_debugger(self, debugger):
"""Finds the installed debugger"""
"""LLDBDumper class."""

@staticmethod
def __find_debugger(debugger):
"""Find the installed debugger."""
return find_program(debugger, ['/usr/bin'])

def dump_info(self, root_logger, logger, pid, process_name, take_dump):
def dump_info(  # pylint: disable=too-many-arguments
self, root_logger, logger, pid, process_name, take_dump):
"""Dump info."""
debugger = "lldb"
dbg = self.__find_debugger(debugger)

if dbg is None:
root_logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
root_logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return

root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)

lldb_version = callo([dbg, "--version"], logger)

@ -217,7 +231,7 @@ class LLDBDumper(object):
# Dump to file, dump_<process name>.<pid>.core
dump_file = "dump_%s.%d.%s" % (process_name, pid, self.get_dump_ext())
dump_command = "process save-core %s" % dump_file
root_logger.info("Dumping core to %s" % dump_file)
root_logger.info("Dumping core to %s", dump_file)

cmds = [
"attach -p %d" % pid,
@ -230,8 +244,8 @@ class LLDBDumper(object):

tf = tempfile.NamedTemporaryFile()

for c in cmds:
tf.write(c + "\n")
for cmd in cmds:
tf.write(cmd + "\n")

tf.flush()

@ -240,60 +254,68 @@ class LLDBDumper(object):
call(['cat', tf.name], logger)
call([dbg, '--source', tf.name], logger)

root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
root_logger.info("Done analyzing %s process with PID %d", process_name, pid)

def get_dump_ext(self):
@staticmethod
def get_dump_ext():
"""Return the dump file extension."""
return "core"


class DarwinProcessList(object):
def __find_ps(self):
"""Finds ps"""
"""DarwinProcessList class."""

@staticmethod
def __find_ps():
"""Find ps."""
return find_program('ps', ['/bin'])

def dump_processes(self, logger):
"""Get list of [Pid, Process Name]"""
"""Get list of [Pid, Process Name]."""
ps = self.__find_ps()

logger.info("Getting list of processes using %s" % ps)
logger.info("Getting list of processes using %s", ps)

ret = callo([ps, "-axco", "pid,comm"], logger)

b = StringIO.StringIO(ret)
csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
buff = StringIO.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)

p = [[int(row[0]), row[1]] for row in csvReader if row[0] != "PID"]

return p
return [[int(row[0]), row[1]] for row in csv_reader if row[0] != "PID"]

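The `dump_processes` rewrites drop the throwaway `p` variable and return the comprehension directly, alongside the `csvReader` to `csv_reader` snake_case renames. A runnable sketch of the same parsing pattern, using canned `ps`-style output instead of a live process list:

    import csv

    PS_OUTPUT = [
        "PID COMMAND",
        "101 mongod",
        "202 mongos",
    ]

    # Same shape as dump_processes(): skip the header, emit [pid, name] pairs.
    csv_reader = csv.reader(PS_OUTPUT, delimiter=' ', skipinitialspace=True)
    processes = [[int(row[0]), row[1]] for row in csv_reader if row[0] != "PID"]
    print(processes)  # [[101, 'mongod'], [202, 'mongos']]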
# GDB dumper is for Linux & Solaris
class GDBDumper(object):
def __find_debugger(self, debugger):
"""Finds the installed debugger"""
"""GDBDumper class."""

@staticmethod
def __find_debugger(debugger):
"""Find the installed debugger."""
return find_program(debugger, ['/opt/mongodbtoolchain/gdb/bin', '/usr/bin'])

def dump_info(self, root_logger, logger, pid, process_name, take_dump):
def dump_info(  # pylint: disable=too-many-arguments,too-many-locals
self, root_logger, logger, pid, process_name, take_dump):
"""Dump info."""
debugger = "gdb"
dbg = self.__find_debugger(debugger)

if dbg is None:
logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return

root_logger.info("Debugger %s, analyzing %s process with PID %d" % (dbg, process_name, pid))
root_logger.info("Debugger %s, analyzing %s process with PID %d", dbg, process_name, pid)

dump_command = ""
if take_dump:
# Dump to file, dump_<process name>.<pid>.core
dump_file = "dump_%s.%d.%s" % (process_name, pid, self.get_dump_ext())
dump_command = "gcore %s" % dump_file
root_logger.info("Dumping core to %s" % dump_file)
root_logger.info("Dumping core to %s", dump_file)

call([dbg, "--version"], logger)

script_dir = os.path.dirname(os.path.abspath(__file__))
root_logger.info("dir %s" % script_dir)
root_logger.info("dir %s", script_dir)
gdb_dir = os.path.join(script_dir, "gdb")
mongo_script = os.path.join(gdb_dir, "mongo.py")
mongo_printers_script = os.path.join(gdb_dir, "mongo_printers.py")
@ -363,13 +385,16 @@ class GDBDumper(object):
call([dbg, "--quiet", "--nx"] +
list(itertools.chain.from_iterable([['-ex', b] for b in cmds])), logger)

root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
root_logger.info("Done analyzing %s process with PID %d", process_name, pid)

def get_dump_ext(self):
@staticmethod
def get_dump_ext():
"""Return the dump file extension."""
return "core"

def _find_gcore(self):
"""Finds the installed gcore"""
@staticmethod
def _find_gcore():
"""Find the installed gcore."""
dbg = "/usr/bin/gcore"
if os.path.exists(dbg):
return dbg
@ -378,81 +403,90 @@ class GDBDumper(object):


class LinuxProcessList(object):
def __find_ps(self):
"""Finds ps"""
"""LinuxProcessList class."""

@staticmethod
def __find_ps():
"""Find ps."""
return find_program('ps', ['/bin', '/usr/bin'])

def dump_processes(self, logger):
"""Get list of [Pid, Process Name]"""
"""Get list of [Pid, Process Name]."""
ps = self.__find_ps()

logger.info("Getting list of processes using %s" % ps)
logger.info("Getting list of processes using %s", ps)

call([ps, "--version"], logger)

ret = callo([ps, "-eo", "pid,args"], logger)

b = StringIO.StringIO(ret)
csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
buff = StringIO.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)

p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]

return p
return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]


class SolarisProcessList(object):
def __find_ps(self):
"""Finds ps"""
"""SolarisProcessList class."""

@staticmethod
def __find_ps():
"""Find ps."""
return find_program('ps', ['/bin', '/usr/bin'])

def dump_processes(self, logger):
"""Get list of [Pid, Process Name]"""
"""Get list of [Pid, Process Name]."""
ps = self.__find_ps()

logger.info("Getting list of processes using %s" % ps)
logger.info("Getting list of processes using %s", ps)

ret = callo([ps, "-eo", "pid,args"], logger)

b = StringIO.StringIO(ret)
csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
buff = StringIO.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)

p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]

return p
return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"]


# jstack is a JDK utility
class JstackDumper(object):
def __find_debugger(self, debugger):
"""Finds the installed jstack debugger"""
"""JstackDumper class."""

@staticmethod
def __find_debugger(debugger):
"""Find the installed jstack debugger."""
return find_program(debugger, ['/usr/bin'])

def dump_info(self, root_logger, logger, pid, process_name):
"""Dump java thread stack traces to the console"""
"""Dump java thread stack traces to the console."""
debugger = "jstack"
jstack = self.__find_debugger(debugger)

if jstack is None:
logger.warning("Debugger %s not found, skipping dumping of %d" % (debugger, pid))
logger.warning("Debugger %s not found, skipping dumping of %d", debugger, pid)
return

root_logger.info("Debugger %s, analyzing %s process with PID %d" % (jstack, process_name,
pid))
root_logger.info("Debugger %s, analyzing %s process with PID %d", jstack, process_name, pid)

call([jstack, "-l", str(pid)], logger)

root_logger.info("Done analyzing %s process with PID %d" % (process_name, pid))
root_logger.info("Done analyzing %s process with PID %d", process_name, pid)


# jstack is a JDK utility
class JstackWindowsDumper(object):
def dump_info(self, root_logger, logger, pid, process_name):
"""Dump java thread stack traces to the logger"""
"""JstackWindowsDumper class."""

root_logger.warning("Debugger jstack not supported, skipping dumping of %d" % (pid))
@staticmethod
def dump_info(root_logger, pid):
"""Dump java thread stack traces to the logger."""

root_logger.warning("Debugger jstack not supported, skipping dumping of %d", pid)

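Where a warning is judged acceptable at a single site — `too-many-arguments` on `dump_info`, `broad-except` around the debugger calls — the diff uses a scoped inline `# pylint: disable=...` comment rather than turning the check off globally. A minimal sketch of both spellings; the function is hypothetical:

    def dump_info(  # pylint: disable=too-many-arguments
            root_logger, logger, pid, process_name, take_dump):
        """Silence one check for this definition only."""
        return (root_logger, logger, pid, process_name, take_dump)

    try:
        dump_info(None, None, 0, "demo", False)
    except Exception:  # pylint: disable=broad-except
        pass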
def get_hang_analyzers():
"""Return hang analyzers."""

dbg = None
jstack = None
ps = None
@ -464,7 +498,7 @@ def get_hang_analyzers():
dbg = GDBDumper()
jstack = JstackDumper()
ps = SolarisProcessList()
elif _is_windows or sys.platform == "cygwin":
elif _IS_WINDOWS or sys.platform == "cygwin":
dbg = WindowsDumper()
jstack = JstackWindowsDumper()
ps = WindowsProcessList()
@ -477,7 +511,7 @@ def get_hang_analyzers():


def check_dump_quota(quota, ext):
"""Check if sum of the files with ext is within the specified quota in megabytes"""
"""Check if sum of the files with ext is within the specified quota in megabytes."""

files = glob.glob("*." + ext)

@ -485,11 +519,11 @@ def check_dump_quota(quota, ext):
for file_name in files:
size_sum += os.path.getsize(file_name)

return (size_sum <= quota)
return size_sum <= quota


def signal_event_object(logger, pid):
"""Signal the Windows event object"""
"""Signal the Windows event object."""

# Use unique event_name created.
event_name = "Global\\Mongo_Python_" + str(pid)
@ -499,13 +533,13 @@ def signal_event_object(logger, pid):
inherit_handle = False
task_timeout_handle = win32event.OpenEvent(desired_access, inherit_handle, event_name)
except win32event.error as err:
logger.info("Exception from win32event.OpenEvent with error: %s" % err)
logger.info("Exception from win32event.OpenEvent with error: %s", err)
return

try:
win32event.SetEvent(task_timeout_handle)
except win32event.error as err:
logger.info("Exception from win32event.SetEvent with error: %s" % err)
logger.info("Exception from win32event.SetEvent with error: %s", err)
finally:
win32api.CloseHandle(task_timeout_handle)

@ -514,23 +548,24 @@ def signal_event_object(logger, pid):


def signal_process(logger, pid, signalnum):
"""Signal process with signal, N/A on Windows"""
"""Signal process with signal, N/A on Windows."""
try:
os.kill(pid, signalnum)

logger.info("Waiting for process to report")
time.sleep(5)
except OSError, e:
logger.error("Hit OS error trying to signal process: %s" % str(e))
except OSError, err:
logger.error("Hit OS error trying to signal process: %s", err)

except AttributeError:
logger.error("Cannot send signal to a process on Windows")


def pname_match(match_type, pname, interesting_processes):
"""Return True if the pname matches in interesting_processes."""
pname = os.path.splitext(pname)[0]
for ip in interesting_processes:
if (match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname):
if match_type == 'exact' and pname == ip or match_type == 'contains' and ip in pname:
return True
return False

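`return (size_sum <= quota)` and `if (os.path.exists(...))` lose their wrapping parentheses above: pylint's superfluous-parens check (C0325) flags parentheses after keywords like `if` and `return`, since the bare expression is the idiomatic form. A minimal sketch:

    def check_dump_quota(size_sum, quota):
        """Return True if the dumps fit within the quota."""
        return size_sum <= quota  # no parens needed after 'return'

    if check_dump_quota(10, 100):
        print("within quota")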
@ -539,32 +574,33 @@ def pname_match(match_type, pname, interesting_processes):
#
# 1. Get a list of interesting processes
# 2. Dump useful information or take dumps
def main():
def main():  # pylint: disable=too-many-branches,too-many-locals,too-many-statements
"""Execute Main program."""
root_logger = logging.Logger("hang_analyzer", level=logging.DEBUG)

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(fmt="%(message)s"))
root_logger.addHandler(handler)

root_logger.info("Python Version: %s" % sys.version)
root_logger.info("OS: %s" % platform.platform())
root_logger.info("Python Version: %s", sys.version)
root_logger.info("OS: %s", platform.platform())

try:
if _is_windows or sys.platform == "cygwin":
if _IS_WINDOWS or sys.platform == "cygwin":
distro = platform.win32_ver()
root_logger.info("Windows Distribution: %s" % str(distro))
root_logger.info("Windows Distribution: %s", distro)
else:
distro = platform.linux_distribution()
root_logger.info("Linux Distribution: %s" % str(distro))
root_logger.info("Linux Distribution: %s", distro)

except AttributeError:
root_logger.warning("Cannot determine Linux distro since Python is too old")

try:
uid = os.getuid()
root_logger.info("Current User: %s" % str(uid))
root_logger.info("Current User: %s", uid)
current_login = os.getlogin()
root_logger.info("Current Login: %s" % current_login)
root_logger.info("Current Login: %s", current_login)
except OSError:
root_logger.warning("Cannot determine Unix Current Login")
except AttributeError:
@ -577,10 +613,10 @@ def main():
parser = OptionParser(description=__doc__)
parser.add_option('-m', '--process-match', dest='process_match', choices=['contains', 'exact'],
default='contains',
help=("Type of match for process names (-p & -g), specify 'contains', or"
" 'exact'. Note that the process name match performs the following"
" conversions: change all process names to lowecase, strip off the file"
" extension, like '.exe' on Windows. Default is 'contains'."))
help="Type of match for process names (-p & -g), specify 'contains', or"
" 'exact'. Note that the process name match performs the following"
" conversions: change all process names to lowecase, strip off the file"
" extension, like '.exe' on Windows. Default is 'contains'.")
parser.add_option('-p', '--process-names', dest='process_names',
help='Comma separated list of process names to analyze')
parser.add_option('-g', '--go-process-names', dest='go_process_names',
@ -594,15 +630,15 @@ def main():
help='Maximum total size of core dumps to keep in megabytes')
parser.add_option('-o', '--debugger-output', dest='debugger_output', action="append",
choices=['file', 'stdout'], default=None,
help=("If 'stdout', then the debugger's output is written to the Python"
" process's stdout. If 'file', then the debugger's output is written"
" to a file named debugger_<process>_<pid>.log for each process it"
" attaches to. This option can be specified multiple times on the"
" command line to have the debugger's output written to multiple"
" locations. By default, the debugger's output is written only to the"
" Python process's stdout."))
help="If 'stdout', then the debugger's output is written to the Python"
" process's stdout. If 'file', then the debugger's output is written"
" to a file named debugger_<process>_<pid>.log for each process it"
" attaches to. This option can be specified multiple times on the"
" command line to have the debugger's output written to multiple"
" locations. By default, the debugger's output is written only to the"
" Python process's stdout.")

(options, args) = parser.parse_args()
(options, _) = parser.parse_args()

if options.debugger_output is None:
options.debugger_output = ['stdout']
@ -621,7 +657,7 @@ def main():
[ps, dbg, jstack] = get_hang_analyzers()

if ps is None or (dbg is None and jstack is None):
root_logger.warning("hang_analyzer.py: Unsupported platform: %s" % (sys.platform))
root_logger.warning("hang_analyzer.py: Unsupported platform: %s", sys.platform)
exit(1)

all_processes = ps.dump_processes(root_logger)
@ -640,14 +676,14 @@ def main():
running_pids = set([pid for (pid, pname) in all_processes])
missing_pids = set(process_ids) - running_pids
if missing_pids:
root_logger.warning(
"The following requested process ids are not running %s" % list(missing_pids))
root_logger.warning("The following requested process ids are not running %s",
list(missing_pids))
else:
processes = [(pid, pname) for (pid, pname) in all_processes
if pname_match(options.process_match, pname, interesting_processes)
and pid != os.getpid()]

root_logger.info("Found %d interesting processes %s" % (len(processes), processes))
root_logger.info("Found %d interesting processes %s", len(processes), processes)

max_dump_size_bytes = int(options.max_core_dumps_size) * 1024 * 1024

@ -656,13 +692,13 @@ def main():
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn.startswith("python")]:
# On Windows, we set up an event object to wait on a signal. For Cygwin, we register
# a signal handler to wait for the signal since it supports POSIX signals.
if _is_windows:
root_logger.info("Calling SetEvent to signal python process %s with PID %d" %
(process_name, pid))
if _IS_WINDOWS:
root_logger.info("Calling SetEvent to signal python process %s with PID %d",
process_name, pid)
signal_event_object(root_logger, pid)
else:
root_logger.info("Sending signal SIGUSR1 to python process %s with PID %d" %
(process_name, pid))
root_logger.info("Sending signal SIGUSR1 to python process %s with PID %d",
process_name, pid)
signal_process(root_logger, pid, signal.SIGUSR1)

trapped_exceptions = []
@ -674,26 +710,25 @@ def main():
try:
dbg.dump_info(root_logger, process_logger, pid, process_name, options.dump_core
and check_dump_quota(max_dump_size_bytes, dbg.get_dump_ext()))
except Exception as err:
root_logger.info("Error encountered when invoking debugger %s" % err)
except Exception as err:  # pylint: disable=broad-except
root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())

# Dump java processes using jstack.
# Dump java processes using jstack.
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn.startswith("java")]:
process_logger = get_process_logger(options.debugger_output, pid, process_name)
try:
jstack.dump_info(root_logger, process_logger, pid, process_name)
except Exception as err:
root_logger.info("Error encountered when invoking debugger %s" % err)
jstack.dump_info(root_logger, pid)
except Exception as err:  # pylint: disable=broad-except
root_logger.info("Error encountered when invoking debugger %s", err)
trapped_exceptions.append(traceback.format_exc())

# Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
# On Windows, this will simply kill the process since python emulates SIGABRT as
# TerminateProcess.
# Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
# Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
# On Windows, this will simply kill the process since python emulates SIGABRT as
# TerminateProcess.
# Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
for (pid, process_name) in [(p, pn) for (p, pn) in processes if pn in go_processes]:
root_logger.info("Sending signal SIGABRT to go process %s with PID %d" % (process_name,
pid))
root_logger.info("Sending signal SIGABRT to go process %s with PID %d", process_name, pid)
signal_process(root_logger, pid, signal.SIGABRT)

root_logger.info("Done analyzing all processes for hangs")

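`(options, args)` becomes `(options, _)` because `args` was never read; binding the unused half of a tuple to `_` is the conventional way to satisfy pylint's unused-variable check (W0612) without changing behavior. A minimal sketch with optparse, matching the module above:

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('-o', '--out', dest='out', default='stdout')

    # The positional-args half of the tuple is deliberately discarded.
    (options, _) = parser.parse_args([])
    print(options.out)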
@ -649,11 +649,11 @@ def _bind_chained_struct(ctxt, parsed_spec, ast_struct, chained_struct):
chained_struct.name)

if not syntax_symbol:
return None
return

if not isinstance(syntax_symbol, syntax.Struct) or isinstance(syntax_symbol, syntax.Command):
ctxt.add_chained_struct_not_found_error(ast_struct, chained_struct.name)
return None
return

struct = cast(syntax.Struct, syntax_symbol)

@ -808,5 +808,5 @@ def bind(parsed_spec):

if ctxt.errors.has_errors():
return ast.IDLBoundSpec(None, ctxt.errors)
else:
return ast.IDLBoundSpec(bound_spec, None)

return ast.IDLBoundSpec(bound_spec, None)

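In binder.py the early exits change from `return None` to a bare `return`: the function is called for its side effects, and pylint's inconsistent-return-statements check (R1710) prefers that a function either always return a value or never do so explicitly. A minimal sketch; the helper is hypothetical:

    def bind_struct(ctxt, struct):
        """Record the struct on ctxt; no meaningful return value."""
        if struct is None:
            return  # bare return: this function never produces a value
        ctxt.append(struct)

    bound = []
    bind_struct(bound, None)
    bind_struct(bound, "chained_struct")
    print(bound)  # ['chained_struct']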
@ -155,10 +155,6 @@ class CppTypeBase(object):
class _CppTypeBasic(CppTypeBase):
"""Default class for C++ Type information. Does not handle view types."""

def __init__(self, field):
# type: (ast.Field) -> None
super(_CppTypeBasic, self).__init__(field)

def get_type_name(self):
# type: () -> unicode
if self._field.struct_type:
@ -284,10 +280,6 @@ class _CppTypeView(CppTypeBase):
class _CppTypeVector(CppTypeBase):
"""Base type for C++ Std::Vector Types information."""

def __init__(self, field):
# type: (ast.Field) -> None
super(_CppTypeVector, self).__init__(field)

def get_type_name(self):
# type: () -> unicode
return 'std::vector<std::uint8_t>'
@ -395,10 +387,6 @@ class _CppTypeDelegating(CppTypeBase):
class _CppTypeArray(_CppTypeDelegating):
"""C++ Array type for wrapping a base C++ Type information."""

def __init__(self, base, field):
# type: (CppTypeBase, ast.Field) -> None
super(_CppTypeArray, self).__init__(base, field)

def get_storage_type(self):
# type: () -> unicode
return _qualify_array_type(self._base.get_storage_type())
@ -422,8 +410,7 @@ class _CppTypeArray(_CppTypeDelegating):
convert = self.get_transform_to_getter_type(member_name)
if convert:
return common.template_args('return ${convert};', convert=convert)
else:
return self._base.get_getter_body(member_name)
return self._base.get_getter_body(member_name)

def get_setter_body(self, member_name):
# type: (unicode) -> unicode
@ -431,8 +418,7 @@ class _CppTypeArray(_CppTypeDelegating):
if convert:
return common.template_args('${member_name} = ${convert};', member_name=member_name,
convert=convert)
else:
return self._base.get_setter_body(member_name)
return self._base.get_setter_body(member_name)

def get_transform_to_getter_type(self, expression):
# type: (unicode) -> Optional[unicode]
@ -441,8 +427,7 @@ class _CppTypeArray(_CppTypeDelegating):
'transformVector(${expression})',
expression=expression,
)
else:
return None
return None

def get_transform_to_storage_type(self, expression):
# type: (unicode) -> Optional[unicode]
@ -451,17 +436,12 @@ class _CppTypeArray(_CppTypeDelegating):
'transformVector(${expression})',
expression=expression,
)
else:
return None
return None


class _CppTypeOptional(_CppTypeDelegating):
"""Base type for Optional C++ Type information which wraps C++ types."""

def __init__(self, base, field):
# type: (CppTypeBase, ast.Field) -> None
super(_CppTypeOptional, self).__init__(base, field)

def get_storage_type(self):
# type: () -> unicode
return _qualify_optional_type(self._base.get_storage_type())
@ -502,8 +482,7 @@ class _CppTypeOptional(_CppTypeDelegating):
return common.template_args('return ${param_type}{${member_name}};',
param_type=self.get_getter_setter_type(),
member_name=member_name)
else:
return common.template_args('return ${member_name};', member_name=member_name)
return common.template_args('return ${member_name};', member_name=member_name)

def get_setter_body(self, member_name):
# type: (unicode) -> unicode
@ -517,13 +496,11 @@ class _CppTypeOptional(_CppTypeDelegating):
${member_name} = boost::none;
}
"""), member_name=member_name, convert=convert)
else:
return self._base.get_setter_body(member_name)
return self._base.get_setter_body(member_name)


def get_cpp_type(field):
# type: (ast.Field) -> CppTypeBase
# pylint: disable=redefined-variable-type
"""Get the C++ Type information for the given field."""

cpp_type_info = None  # type: Any
@ -533,7 +510,7 @@ def get_cpp_type(field):
elif field.cpp_type == 'std::vector<std::uint8_t>':
cpp_type_info = _CppTypeVector(field)
else:
cpp_type_info = _CppTypeBasic(field)  # pylint: disable=redefined-variable-type
cpp_type_info = _CppTypeBasic(field)

if field.array:
cpp_type_info = _CppTypeArray(cpp_type_info, field)
@ -617,10 +594,6 @@ class _CommonBsonCppTypeBase(BsonCppTypeBase):
class _ObjectBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for object BSON types."""

def __init__(self, field):
# type: (ast.Field) -> None
super(_ObjectBsonCppTypeBase, self).__init__(field)

def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
if self._field.deserializer:
@ -630,9 +603,8 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
object_instance=object_instance))
return "localObject"

else:
# Just pass the BSONObj through without trying to parse it.
return common.template_args('${object_instance}.Obj()', object_instance=object_instance)
# Just pass the BSONObj through without trying to parse it.
return common.template_args('${object_instance}.Obj()', object_instance=object_instance)

def has_serializer(self):
# type: () -> bool
@ -650,18 +622,13 @@ class _ObjectBsonCppTypeBase(BsonCppTypeBase):
class _BinDataBsonCppTypeBase(BsonCppTypeBase):
"""Custom C++ support for all binData BSON types."""

def __init__(self, field):
# type: (ast.Field) -> None
super(_BinDataBsonCppTypeBase, self).__init__(field)

def gen_deserializer_expression(self, indented_writer, object_instance):
# type: (writer.IndentedTextWriter, unicode) -> unicode
if self._field.bindata_subtype == 'uuid':
return common.template_args('${object_instance}.uuid()',
object_instance=object_instance)
else:
return common.template_args('${object_instance}._binDataVector()',
object_instance=object_instance)
return common.template_args('${object_instance}._binDataVector()',
object_instance=object_instance)

def has_serializer(self):
# type: () -> bool

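The repeated collapse of `else: return ...` into a plain `return` across cpp_types.py follows pylint's no-else-return refactoring check (R1705): once the `if` branch returns, the `else` is dead indentation. A minimal sketch:

    def get_getter_body(convert, member_name):
        """Unnested form preferred by no-else-return (R1705)."""
        if convert:
            return "return %s;" % convert
        return "return %s;" % member_name

    print(get_getter_body(None, "field"))  # return field;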
@ -117,10 +117,6 @@ class _EnumTypeInt(EnumTypeInfoBase):

__metaclass__ = ABCMeta

def __init__(self, idl_enum):
# type: (Union[syntax.Enum,ast.Enum]) -> None
super(_EnumTypeInt, self).__init__(idl_enum)

def get_cpp_type_name(self):
# type: () -> unicode
return common.title_case(self._enum.name)
@ -196,10 +192,6 @@ class _EnumTypeString(EnumTypeInfoBase):

__metaclass__ = ABCMeta

def __init__(self, idl_enum):
# type: (Union[syntax.Enum,ast.Enum]) -> None
super(_EnumTypeString, self).__init__(idl_enum)

def get_cpp_type_name(self):
# type: () -> unicode
return common.template_args("${enum_name}Enum", enum_name=common.title_case(

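enum_types.py drops `__init__` overrides whose only statement was the matching `super(...).__init__(...)` call; pylint reports these as useless-super-delegation (W0235), and Python supplies the inherited constructor automatically. A minimal sketch:

    class EnumTypeInfoBase(object):
        """Base carrying the shared constructor."""

        def __init__(self, idl_enum):
            self._enum = idl_enum

    class EnumTypeInt(EnumTypeInfoBase):
        """No __init__ override needed: the base constructor is inherited."""

        def type_name(self):
            return "std::int32_t"

    print(EnumTypeInt("MyEnum").type_name())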
@ -108,7 +108,7 @@ class ParserError(common.SourceLocation):

def __init__(self, error_id, msg, file_name, line, column):
# type: (unicode, unicode, unicode, int, int) -> None
""""Construct a parser error with source location information."""
"""Construct a parser error with source location information."""
# pylint: disable=too-many-arguments
self.error_id = error_id
self.msg = msg
@ -116,8 +116,7 @@ class ParserError(common.SourceLocation):

def __str__(self):
# type: () -> str
"""
Return a formatted error.
"""Return a formatted error.

Example error message:
test.idl: (17, 4): ID0008: Unknown IDL node 'cpp_namespac' for YAML entity 'global'.
@ -128,11 +127,11 @@ class ParserError(common.SourceLocation):


class ParserErrorCollection(object):
"""A collection of parser errors with source context information."""
"""Collection of parser errors with source context information."""

def __init__(self):
# type: () -> None
"""Default constructor."""
"""Initialize ParserErrorCollection."""
self._errors = []  # type: List[ParserError]

def add(self, location, error_id, msg):
@ -310,9 +309,8 @@ class ParserContext(object):
assert self.is_scalar_sequence_or_scalar_node(node, "unknown")
if node.id == "scalar":
return [node.value]
else:
# Unzip the list of ScalarNode
return [v.value for v in node.value]
# Unzip the list of ScalarNode
return [v.value for v in node.value]

def add_duplicate_error(self, node, node_name):
# type: (yaml.nodes.Node, unicode) -> None

@ -103,9 +103,8 @@ def _get_bson_type_check(bson_element, ctxt_name, field):
if not bson_types[0] == 'bindata':
return '%s.checkAndAssertType(%s, %s)' % (ctxt_name, bson_element,
bson.cpp_bson_type_name(bson_types[0]))
else:
return '%s.checkAndAssertBinDataType(%s, %s)' % (
ctxt_name, bson_element, bson.cpp_bindata_subtype_type_name(field.bindata_subtype))
return '%s.checkAndAssertBinDataType(%s, %s)' % (
ctxt_name, bson_element, bson.cpp_bindata_subtype_type_name(field.bindata_subtype))
else:
type_list = '{%s}' % (', '.join([bson.cpp_bson_type_name(b) for b in bson_types]))
return '%s.checkAndAssertTypes(%s, %s)' % (ctxt_name, bson_element, type_list)
@ -372,11 +371,6 @@ class _CppFileWriterBase(object):
class _CppHeaderFileWriter(_CppFileWriterBase):
"""C++ .h File writer."""

def __init__(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
"""Create a C++ .cpp file code writer."""
super(_CppHeaderFileWriter, self).__init__(indented_writer)

def gen_class_declaration_block(self, class_name):
# type: (unicode) -> writer.IndentedScopedBlock
"""Generate a class declaration block."""
@ -770,39 +764,37 @@ class _CppSourceFileWriter(_CppFileWriterBase):
elif field.deserializer and 'BSONElement::' in field.deserializer:
method_name = writer.get_method_name(field.deserializer)
return '%s.%s()' % (element_name, method_name)
else:
# Custom method, call the method on object.
bson_cpp_type = cpp_types.get_bson_cpp_type(field)

if bson_cpp_type:
# Call a static class method with the signature:
# Class Class::method(StringData value)
# or
# Class::method(const BSONObj& value)
expression = bson_cpp_type.gen_deserializer_expression(self._writer, element_name)
if field.deserializer:
method_name = writer.get_method_name_from_qualified_method_name(
field.deserializer)
# Custom method, call the method on object.
bson_cpp_type = cpp_types.get_bson_cpp_type(field)

# For fields which are enums, pass a IDLParserErrorContext
if field.enum_type:
self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
(_get_field_constant_name(field)))
return common.template_args("${method_name}(tempContext, ${expression})",
method_name=method_name, expression=expression)
else:
return common.template_args("${method_name}(${expression})",
method_name=method_name, expression=expression)
else:
# BSONObjects are allowed to be pass through without deserialization
assert field.bson_serialization_type == ['object']
return expression
else:
# Call a static class method with the signature:
# Class Class::method(const BSONElement& value)
if bson_cpp_type:
# Call a static class method with the signature:
# Class Class::method(StringData value)
# or
# Class::method(const BSONObj& value)
expression = bson_cpp_type.gen_deserializer_expression(self._writer, element_name)
if field.deserializer:
method_name = writer.get_method_name_from_qualified_method_name(field.deserializer)

return '%s(%s)' % (method_name, element_name)
# For fields which are enums, pass a IDLParserErrorContext
if field.enum_type:
self._writer.write_line('IDLParserErrorContext tempContext(%s, &ctxt);' %
(_get_field_constant_name(field)))
return common.template_args("${method_name}(tempContext, ${expression})",
method_name=method_name, expression=expression)
return common.template_args("${method_name}(${expression})",
method_name=method_name, expression=expression)

# BSONObjects are allowed to be pass through without deserialization
assert field.bson_serialization_type == ['object']
return expression

# Call a static class method with the signature:
# Class Class::method(const BSONElement& value)
method_name = writer.get_method_name_from_qualified_method_name(field.deserializer)

return '%s(%s)' % (method_name, element_name)

def _gen_array_deserializer(self, field, bson_element):
# type: (ast.Field, unicode) -> None

@ -22,9 +22,9 @@ from __future__ import absolute_import, print_function, unicode_literals

from abc import ABCMeta, abstractmethod
import io
from typing import Any, Callable, Dict, List, Set, Tuple, Union
import yaml
from yaml import nodes
from typing import Any, Callable, Dict, List, Set, Tuple, Union

from . import common
from . import cpp_types
@ -65,7 +65,7 @@ def _generic_parser(
syntax_node_name,  # type: unicode
syntax_node,  # type: Any
mapping_rules  # type: Dict[unicode, _RuleDesc]
):
):  # type: (...) -> None
# pylint: disable=too-many-branches
field_name_set = set()  # type: Set[str]

@ -129,7 +129,7 @@ def _parse_mapping(
node,  # type: Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]
syntax_node_name,  # type: unicode
func  # type: Callable[[errors.ParserContext,syntax.IDLSpec,unicode,Union[yaml.nodes.MappingNode, yaml.nodes.ScalarNode, yaml.nodes.SequenceNode]], None]
):
):  # type: (...) -> None
"""Parse a top-level mapping section in the IDL file."""
if not ctxt.is_mapping_node(node, syntax_node_name):
return
@ -542,10 +542,10 @@ def _parse(stream, error_file_name):

if ctxt.errors.has_errors():
return syntax.IDLParsedSpec(None, ctxt.errors)
else:
_propagate_globals(spec)

return syntax.IDLParsedSpec(spec, None)
_propagate_globals(spec)

return syntax.IDLParsedSpec(spec, None)


class ImportResolverBase(object):
@ -625,7 +625,7 @@ def parse(stream, input_file_name, resolver):
return parsed_doc

# We need to generate includes for imported IDL files which have structs
if base_file_name == input_file_name and len(parsed_doc.spec.symbols.structs):
if base_file_name == input_file_name and parsed_doc.spec.symbols.structs:
needs_include.append(imported_file_name)

# Add other imported files to the list of files to parse

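The `):  # type: (...) -> None` edits in parser.py use mypy's comment-based annotation syntax for Python 2: each parameter carries its own `# type:` comment, and the line with the closing parenthesis declares the signature, with `(...)` standing in for the per-argument types. A minimal sketch; the names are hypothetical:

    def generic_parser(
            ctxt,  # type: object
            syntax_node_name,  # type: str
            mapping_rules  # type: dict
    ):  # type: (...) -> None
        """Typed for mypy while staying valid Python 2 syntax."""
        print(ctxt, syntax_node_name, sorted(mapping_rules))

    generic_parser(None, "global", {})

The same hunk also replaces `len(parsed_doc.spec.symbols.structs)` in a condition with the bare sequence, which is what pylint's len-as-condition check (C1801) recommends: empty sequences are already falsy.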
@ -315,22 +315,6 @@ class _IgnoredCommandTypeInfo(_CommandBaseTypeInfo):
common.title_case(self._struct.cpp_name), 'toBSON',
['const BSONObj& commandPassthroughFields'], 'BSONObj', const=True)

def get_deserializer_static_method(self):
# type: () -> MethodInfo
return super(_IgnoredCommandTypeInfo, self).get_deserializer_static_method()

def get_deserializer_method(self):
# type: () -> MethodInfo
return super(_IgnoredCommandTypeInfo, self).get_deserializer_method()

def gen_getter_method(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
super(_IgnoredCommandTypeInfo, self).gen_getter_method(indented_writer)

def gen_member(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
super(_IgnoredCommandTypeInfo, self).gen_member(indented_writer)

def gen_serializer(self, indented_writer):
# type: (writer.IndentedTextWriter) -> None
indented_writer.write_line('builder->append("%s", 1);' % (self._command.name))
@ -464,7 +448,6 @@ def get_struct_info(struct):
return _IgnoredCommandTypeInfo(struct)
elif struct.namespace == common.COMMAND_NAMESPACE_CONCATENATE_WITH_DB:
return _CommandWithNamespaceTypeInfo(struct)
else:
return _CommandFromType(struct)
return _CommandFromType(struct)

return _StructTypeInfo(struct)

@ -26,7 +26,7 @@ import idl.compiler

def main():
# type: () -> None
"""Main Entry point."""
"""Execute Main Entry point."""
parser = argparse.ArgumentParser(description='MongoDB IDL Compiler.')

parser.add_argument('file', type=str, help="IDL input file")

@ -39,11 +39,6 @@ def errors_to_str(errors):

class NothingImportResolver(idl.parser.ImportResolverBase):
"""An import resolver that does nothing."""

def __init__(self):
# type: () -> None
"""Construct a NothingImportResolver."""
super(NothingImportResolver, self).__init__()

def resolve(self, base_file, imported_file_name):
# type: (unicode, unicode) -> unicode
"""Return the complete path to an imported file name."""

@ -7,13 +7,15 @@ import jira

class JiraClient(object):
"""A client for JIRA."""

CLOSE_TRANSITION_NAME = "Close Issue"
RESOLVE_TRANSITION_NAME = "Resolve Issue"
FIXED_RESOLUTION_NAME = "Fixed"
WONT_FIX_RESOLUTION_NAME = "Won't Fix"

def __init__(self, server, username=None, password=None, access_token=None,
access_token_secret=None, consumer_key=None, key_cert=None):
def __init__(  # pylint: disable=too-many-arguments
self, server, username=None, password=None, access_token=None, access_token_secret=None,
consumer_key=None, key_cert=None):
"""Initialize the JiraClient with the server URL and user credentials."""
opts = {"server": server, "verify": True}
basic_auth = None

@ -1,7 +1,5 @@
#!/usr/bin/env python
"""
Utility for computing test failure rates from the Evergreen API.
"""
"""Utility for computing test failure rates from the Evergreen API."""

from __future__ import absolute_import
from __future__ import division
@ -21,7 +19,7 @@ import warnings
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from urllib.parse import urlparse  # type: ignore

import requests
import requests.exceptions
@ -47,18 +45,17 @@ _ReportEntry = collections.namedtuple("_ReportEntry", [


class Wildcard(object):
"""
A class for representing there are multiple values associated with a particular component.
"""
"""Class for representing there are multiple values associated with a particular component."""

def __init__(self, kind):
"""Initialize Wildcard."""
self._kind = kind

def __eq__(self, other):
if not isinstance(other, Wildcard):
return NotImplemented

return self._kind == other._kind
return self._kind == other._kind  # pylint: disable=protected-access

def __ne__(self, other):
return not self == other

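test_failures.py shows the other common pydocstyle fix in this commit: docstring summaries are rewritten in the imperative mood ("Return ..." rather than "Returns ..."), matching the D401 check, with the one-line summary on the opening-quote line and any detail after a blank line. A minimal sketch mirroring the `fail_rate` property below; the standalone function is hypothetical:

    def fail_rate(num_pass, num_fail):
        """Return the fraction of failures among all executions.

        A pair with no executions is reported as 0.0 for convenience.
        """
        if num_pass + num_fail == 0:
            return 0.0
        return float(num_fail) / (num_pass + num_fail)

    print(fail_rate(9, 1))  # 0.1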
@ -71,9 +68,7 @@ class Wildcard(object):
|
||||
|
||||
|
||||
class ReportEntry(_ReportEntry):
|
||||
"""
|
||||
Holds information about Evergreen test executions.
|
||||
"""
|
||||
"""Information about Evergreen test executions."""
|
||||
|
||||
_MULTIPLE_TESTS = Wildcard("tests")
|
||||
_MULTIPLE_TASKS = Wildcard("tasks")
|
||||
@ -85,8 +80,7 @@ class ReportEntry(_ReportEntry):
|
||||
|
||||
@property
|
||||
def fail_rate(self):
|
||||
"""
|
||||
Returns the fraction of test failures to total number of test executions.
|
||||
"""Get the fraction of test failures to total number of test executions.
|
||||
|
||||
If a test hasn't been run at all, then we still say it has a failure rate of 0% for
|
||||
convenience when applying thresholds.
@ -97,9 +91,9 @@ class ReportEntry(_ReportEntry):
return self.num_fail / (self.num_pass + self.num_fail)

def period_start_date(self, start_date, period_size):
"""
Returns a datetime.date() instance corresponding to the beginning of the time period
containing 'self.start_date'.
"""Return a datetime.date() instance for the period start date.

The result corresponds to the beginning of the time period containing 'self.start_date'.
"""

if not isinstance(start_date, datetime.date):
@ -118,9 +112,10 @@ class ReportEntry(_ReportEntry):
return self.start_date - datetime.timedelta(days=start_day_offset)

def week_start_date(self, start_day_of_week):
"""
Returns a datetime.date() instance corresponding to the beginning of the week containing
'self.start_date'. The first day of the week can be specified as the strings "Sunday" or
"""Return a datetime.date() instance of the week's start date.

The result corresponds to the beginning of the week containing 'self.start_date'.
The first day of the week can be specified as the strings "Sunday" or
"Monday", as well as an arbitrary datetime.date() instance.
"""

@ -144,9 +139,9 @@ class ReportEntry(_ReportEntry):

@classmethod
def sum(cls, entries):
"""
Returns a single ReportEntry() instance corresponding to all test executions represented by
'entries'.
"""Return a single ReportEntry() instance.

The result corresponds to all test executions represented by 'entries'.
"""

test = set()
@ -179,9 +174,7 @@ class ReportEntry(_ReportEntry):


class Report(object):
"""
A class for generating summarizations about Evergreen test executions.
"""
"""Class for generating summarizations about Evergreen test executions."""

TEST = ("test", )
TEST_TASK = ("test", "task")
@ -196,9 +189,7 @@ class Report(object):
FIRST_DAY = "first-day"

def __init__(self, entries):
"""
Initializes the Report instance.
"""
"""Initialize the Report instance."""

if not isinstance(entries, list):
# It is possible that 'entries' is a generator function, so we convert it to a list in
@ -215,16 +206,15 @@ class Report(object):

@property
def raw_data(self):
"""
Returns a copy of the list of ReportEntry instances underlying the report.
"""
"""Get a copy of the list of ReportEntry instances underlying the report."""

return self._entries[:]

def summarize_by(self, components, time_period=None, start_day_of_week=FIRST_DAY):
"""
Returns a list of ReportEntry instances grouped by
def summarize_by( # pylint: disable=too-many-branches,too-many-locals
self, components, time_period=None, start_day_of_week=FIRST_DAY):
"""Return a list of ReportEntry instances grouped by the following.

Grouping:
'components' if 'time_period' is None,

'components' followed by Entry.start_date if 'time_period' is "daily",
@ -272,9 +262,9 @@ class Report(object):
" instance"))

def key_func(entry):
"""
Assigns a key for sorting and grouping ReportEntry instances based on the combination of
options summarize_by() was called with.
"""Assign a key for sorting and grouping ReportEntry instances.

The result is based on the combination of options summarize_by() was called with.
"""

return [func(entry) for func in group_by]
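The week_start_date() docstring above describes snapping a date back to the first day of its week. A minimal standalone sketch of that arithmetic (the helper name is illustrative, not the module's actual code):

    import datetime

    def week_start(day, first_day="Monday"):
        # date.weekday() is 0 for Monday through 6 for Sunday.
        offset = (day.weekday() - (6 if first_day == "Sunday" else 0)) % 7
        return day - datetime.timedelta(days=offset)

    print(week_start(datetime.date(2018, 3, 14)))  # 2018-03-12, a Monday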
@ -303,18 +293,17 @@ class Report(object):


class Missing(object):
"""
A class for representing the value associated with a particular component is unknown.
"""
"""Class for representing the value associated with a particular component is unknown."""

def __init__(self, kind):
"""Initialize Missing."""
self._kind = kind

def __eq__(self, other):
if not isinstance(other, Missing):
return NotImplemented

return self._kind == other._kind
return self._kind == other._kind # pylint: disable=protected-access

def __ne__(self, other):
return not self == other
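Worth noting: Python 2 does not derive __ne__ from __eq__, which is why the sentinel defines both. A compressed illustration of the intended contract (assumes the Missing class above):

    assert Missing("distro") == Missing("distro")
    assert Missing("distro") != Missing("task")
    assert Missing("distro") != "distro"  # non-Missing values never compare equal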
@ -327,9 +316,7 @@ class Missing(object):


class TestHistory(object):
"""
A class for interacting with the /test_history Evergreen API endpoint.
"""
"""Class for interacting with the /test_history Evergreen API endpoint."""

DEFAULT_API_SERVER = "https://evergreen.mongodb.com"
DEFAULT_PROJECT = "mongodb-mongo-master"
@ -345,11 +332,10 @@ class TestHistory(object):

_MISSING_DISTRO = Missing("distro")

def __init__(self, api_server=DEFAULT_API_SERVER, project=DEFAULT_PROJECT, tests=None,
tasks=None, variants=None, distros=None):
"""
Initializes the TestHistory instance with the list of tests, tasks, variants, and distros
specified.
def __init__( # pylint: disable=too-many-arguments
self, api_server=DEFAULT_API_SERVER, project=DEFAULT_PROJECT, tests=None, tasks=None,
variants=None, distros=None):
"""Initialize the TestHistory instance with the list of tests, tasks, variants, and distros.

The list of tests specified are augmented to ensure that failures on both POSIX and Windows
platforms are returned by the Evergreen API.
@ -374,9 +360,10 @@ class TestHistory(object):
def get_history_by_revision(self, start_revision, end_revision,
test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
"""
Returns a list of ReportEntry instances corresponding to each individual test execution
between 'start_revision' and 'end_revision'.
"""Return a list of ReportEntry instances.

The result corresponds to each individual test execution between 'start_revision' and
'end_revision'.

Only tests with status 'test_statuses' are included in the result. Similarly, only tests
with status 'task_statuses' are included in the result. By default, both passing and failing
@ -408,13 +395,14 @@ class TestHistory(object):

def get_history_by_date(self, start_date, end_date, test_statuses=DEFAULT_TEST_STATUSES,
task_statuses=DEFAULT_TASK_STATUSES):
"""
Returns a list of ReportEntry instances corresponding to each individual test execution
between 'start_date' and 'end_date'.
"""Return a list of ReportEntry instances.

The result corresponds to each individual test execution between 'start_date' and
'end_date'.

Only tests with status 'test_statuses' are included in the result. Similarly, only tests
with status 'task_statuses' are included in the result. By default, both passing and failing
test executions are returned.
with status 'task_statuses' are included in the result. By default, both passing and
failing test executions are returned.
"""

warnings.warn(
@ -431,8 +419,8 @@ class TestHistory(object):
history_data = set()

# Since the API limits the results, with each invocation being distinct, we can simulate
# pagination by making subsequent requests using "afterDate" and being careful to filter out
# duplicate test results.
# pagination by making subsequent requests using "afterDate" and being careful to filter
# out duplicate test results.
while True:
params["afterDate"] = start_time

@ -453,8 +441,7 @@ class TestHistory(object):
return list(history_data)

def _get_history(self, params):
"""
Calls the test_history API endpoint with the given parameters and returns the JSON result.
"""Call the test_history API endpoint with the given parameters and return the JSON result.

The API calls will be retried on HTTP and connection errors.
"""
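The pagination comment above describes a cursor-less loop: re-issue the request with "afterDate" advanced past what has already been seen, and drop the duplicate rows from the overlap. A minimal sketch of that pattern, written against a hypothetical fetch(params) callable rather than the real endpoint:

    def fetch_all(fetch, params, start_time):
        # Drain a size-limited endpoint by repeatedly advancing "afterDate".
        results = {}
        after = start_time
        while True:
            params["afterDate"] = after
            page = fetch(params)  # hypothetical: returns a list of result dicts
            fresh = [row for row in page if row["id"] not in results]
            if not fresh:
                return list(results.values())
            for row in fresh:
                results[row["id"]] = row
            after = max(row["start_time"] for row in fresh)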
@ -496,9 +483,7 @@ class TestHistory(object):
raise JSONResponseError(err)

def _process_test_result(self, test_result):
"""
Returns a ReportEntry() tuple representing the 'test_result' dictionary.
"""
"""Return a ReportEntry() tuple representing the 'test_result' dictionary."""

# For individual test executions, we intentionally use the "start_time" of the test as both
# its 'start_date' and 'end_date' to avoid complicating how the test history is potentially
@ -516,7 +501,8 @@ class TestHistory(object):

@staticmethod
def _normalize_test_file(test_file):
"""
"""Return normalized test_file name.

If 'test_file' represents a Windows-style path, then it is converted to a POSIX-style path
with

@ -536,8 +522,7 @@ class TestHistory(object):
return test_file

def _denormalize_test_file(self, test_file):
"""
Returns a list containing 'test_file' as both a POSIX-style path and a Windows-style path.
"""Return a list containing 'test_file' as both a POSIX-style and a Windows-style path.

The conversion process may involve replacing forward slashes (/) as the path separator
with backslashes (\\), as well as adding a ".exe" extension if 'test_file' has no file
@ -555,9 +540,7 @@ class TestHistory(object):
return [test_file]

def _history_request_params(self, test_statuses, task_statuses):
"""
Returns the query parameters for /test_history GET request as a dictionary.
"""
"""Return the query parameters for /test_history GET request as a dictionary."""

return {
"distros": ",".join(self._distros),
@ -571,8 +554,7 @@ class TestHistory(object):


def _parse_date(date_str):
"""
Returns a datetime.date instance representing the specified yyyy-mm-dd date string.
"""Return a datetime.date instance representing the specified yyyy-mm-dd date string.

Note that any time component of 'date_str', including the timezone, is ignored.
"""
@ -584,16 +566,16 @@ def _parse_date(date_str):
class JSONResponseError(Exception):
"""An exception raised when failing to decode the JSON from an Evergreen response."""

def __init__(self, cause):
"""Initializes the JSONResponseError with the exception raised by the requests library
when decoding the response."""
def __init__(self, cause): # pylint: disable=super-init-not-called
"""Initialize the JSONResponseError.

It is set with the exception raised by the requests library when decoding the response.
"""
self.cause = cause


def main():
"""
Utility computing test failure rates from the Evergreen API.
"""
"""Execute computing test failure rates from the Evergreen API."""

parser = optparse.OptionParser(description=main.__doc__,
usage="Usage: %prog [options] [test1 test2 ...]")
@ -695,7 +677,7 @@ def main():

def read_evg_config():
"""
Attempts to parse the user's or system's Evergreen configuration from its known locations.
Attempt to parse the user's or system's Evergreen configuration from its known locations.

Returns None if the configuration file wasn't found anywhere.
"""
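The normalize/denormalize pair above exists because Evergreen records Windows executions with backslashed paths and an .exe suffix. A standalone sketch of the denormalization direction (illustrative, not the module's exact rules):

    import posixpath

    def denormalize(test_file):
        # Return the POSIX and Windows spellings of the same test path.
        if "/" not in test_file:
            return [test_file]
        windows = test_file.replace("/", "\\")
        if not posixpath.splitext(test_file)[1]:
            windows += ".exe"  # extension-less tests are Windows executables
        return [test_file, windows]

    print(denormalize("jstests/core/and.js"))  # POSIX form plus jstests\core\and.js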
@ -1,15 +1,23 @@
"""Lint module."""

from __future__ import print_function

import sys
import codecs

import cpplint
import utils
import buildscripts.cpplint as cpplint
import buildscripts.utils as utils


class CheckForConfigH:
class CheckForConfigH(object):
"""CheckForConfigH class."""

def __init__(self):
"""Initialize CheckForConfigH."""
self.found_configh = False

def __call__(self, filename, clean_lines, line_num, error):
"""Check for a config file."""
if self.found_configh:
return

@ -21,7 +29,8 @@ class CheckForConfigH:
'MONGO_CONFIG define used without prior inclusion of config.h.')


def run_lint(paths, nudgeOn=False):
def run_lint(paths, nudge_on=False):
"""Run lint."""
# errors are as of 10/14
# idea is not to let in any new type of error
# as we knock one out, we should remove line
@ -70,25 +79,26 @@ def run_lint(paths, nudgeOn=False):
nudge.append('-whitespace/tab') # errors found: 233

filters = later + never
if not nudgeOn:
if not nudge_on:
filters = filters + nudge

sourceFiles = []
for x in paths:
utils.getAllSourceFiles(sourceFiles, x)
source_files = []
for path in paths:
utils.get_all_source_files(source_files, path)

args = ["--linelength=100", "--filter=" + ",".join(filters), "--counting=detailed"
] + sourceFiles
args = \
["--linelength=100", "--filter=" + ",".join(filters), "--counting=detailed"] + source_files
filenames = cpplint.ParseArguments(args)

def _ourIsTestFilename(fn):
if fn.find("dbtests") >= 0:
def _our_is_test_filename(file_name):
if file_name.find("dbtests") >= 0:
return True
if fn.endswith("_test.cpp"):
if file_name.endswith("_test.cpp"):
return True
return False

cpplint._IsTestFilename = _ourIsTestFilename
# pylint: disable=protected-access
cpplint._IsTestFilename = _our_is_test_filename

# Change stderr to write with replacement characters so we don't die
# if we try to print something containing non-ASCII characters.
@ -102,9 +112,12 @@ def run_lint(paths, nudgeOn=False):
cpplint._cpplint_state.PrintErrorCounts()

return cpplint._cpplint_state.error_count == 0
# pylint: enable=protected-access


if __name__ == "__main__":
def main():
"""Execute Main program."""

paths = []
nudge = False

@ -119,8 +132,12 @@ if __name__ == "__main__":
sys.exit(-1)
paths.append(arg)

if len(paths) == 0:
if not paths:
paths.append("src/mongo/")

if not run_lint(paths, nudge):
sys.exit(-1)


if __name__ == "__main__":
main()
@ -11,16 +11,18 @@ class LinterBase(object):

__metaclass__ = ABCMeta

def __init__(self, cmd_name, required_version):
# type: (str, str) -> None
def __init__(self, cmd_name, required_version, cmd_location=None):
# type: (str, str, Optional[str]) -> None
"""
Create a linter.

cmd_name - short friendly name
required_version - the required version string to check against
cmd_location - location of executable
"""
self.cmd_name = cmd_name
self.required_version = required_version
self.cmd_location = cmd_location

@abstractmethod
def get_lint_cmd_args(self, file_name):
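The new cmd_location parameter lets a concrete linter pin its executable to an explicit path instead of a PATH or toolchain lookup. A sketch of how a subclass might use it (FooLinter and the FOO environment variable are hypothetical; assumes import os alongside the LinterBase above):

    class FooLinter(LinterBase):
        """Sketch: a linter whose executable location can be overridden."""

        def __init__(self):
            # type: () -> None
            # os.getenv("FOO") is None when unset, preserving the default lookup.
            super(FooLinter, self).__init__("foo", "foo 1.0", os.getenv("FOO"))

        def get_lint_cmd_args(self, file_name):
            # type: (str) -> List[str]
            return [file_name]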
@ -69,7 +69,7 @@ class Repo(_git.Repository):

Returns the full path to the file for clang-format to consume.
"""
if candidates is not None and len(candidates) > 0:
if candidates is not None and len(candidates) > 0: # pylint: disable=len-as-condition
candidates = [self._get_local_dir(f) for f in candidates]
valid_files = list(
set(candidates).intersection(self.get_candidate_files(filter_function)))
@ -150,7 +150,7 @@ def get_files_to_check(files, filter_function):
candidates_nested = [expand_file_string(f) for f in files]
candidates = list(itertools.chain.from_iterable(candidates_nested))

if len(files) > 0 and len(candidates) == 0:
if files and not candidates:
raise ValueError("Globs '%s' did not find any files with glob." % (files))

repos = get_repos()
@ -159,7 +159,7 @@ def get_files_to_check(files, filter_function):
itertools.chain.from_iterable(
[r.get_candidates(candidates, filter_function) for r in repos]))

if len(files) > 0 and len(valid_files) == 0:
if files and not valid_files:
raise ValueError("Globs '%s' did not find any files with glob in git." % (files))

return valid_files
@ -2,6 +2,7 @@
from __future__ import absolute_import
from __future__ import print_function

import os
from typing import List

from . import base
@ -13,7 +14,9 @@ class MypyLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a mypy linter."""
super(MypyLinter, self).__init__("mypy", "mypy 0.501")
# User can override the location of mypy from an environment variable.

super(MypyLinter, self).__init__("mypy", "mypy 0.580", os.getenv("MYPY"))

def get_lint_version_cmd_args(self):
# type: () -> List[str]
@ -23,17 +26,7 @@ class MypyLinter(base.LinterBase):
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
# -py2 - Check Python 2 code for type annotations in comments
# --disallow-untyped-defs - Error if any code is missing type annotations
# --ignore-missing-imports - Do not error if imports are not found. This can be a problem
# with standalone scripts and relative imports. This will limit effectiveness but avoids
# mypy complaining about running code.
# --follow-imports=silent - Do not error on imported files since all imported files may not
# be mypy clean
return [
"--py2", "--disallow-untyped-defs", "--ignore-missing-imports",
"--follow-imports=silent", file_name
]
return [file_name]

def ignore_interpreter(self):
# type: () -> bool
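With the hunk above, get_lint_cmd_args() no longer passes any flags, so the strictness settings presumably move into mypy's own configuration. For reference, the removed behavior is equivalent to roughly this manual invocation (the file path is illustrative; the flags are real mypy options taken from the deleted comment block):

    import subprocess

    # subprocess.call returns mypy's exit code rather than raising on findings.
    subprocess.call([
        "mypy", "--py2", "--disallow-untyped-defs", "--ignore-missing-imports",
        "--follow-imports=silent", "buildscripts/somefile.py",
    ])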
@ -13,7 +13,7 @@ class PyDocstyleLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a pydocstyle linter."""
super(PyDocstyleLinter, self).__init__("pydocstyle", "1.1.1")
super(PyDocstyleLinter, self).__init__("pydocstyle", "2.1.1")

def get_lint_version_cmd_args(self):
# type: () -> List[str]
@ -15,9 +15,7 @@ class PyLintLinter(base.LinterBase):
def __init__(self):
# type: () -> None
"""Create a pylint linter."""
self._rc_file = os.path.join(
os.path.normpath(git.get_base_dir()), "buildscripts", ".pylintrc")
super(PyLintLinter, self).__init__("pylint", "pylint 1.6.5")
super(PyLintLinter, self).__init__("pylint", "pylint 1.8.3")

def get_lint_version_cmd_args(self):
# type: () -> List[str]
@ -27,10 +25,4 @@ class PyLintLinter(base.LinterBase):
def get_lint_cmd_args(self, file_name):
# type: (str) -> List[str]
"""Get the command to run a linter."""
# pylintrc only searches parent directories if it is a part of a module, and since our code
# is split across different modules, and individual script files, we need to specify the
# path to the rcfile.
# See https://pylint.readthedocs.io/en/latest/user_guide/run.html
return [
"--rcfile=%s" % (self._rc_file), "--output-format", "msvs", "--reports=n", file_name
]
return ["--output-format=msvs", "--reports=n", file_name]
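For comparison, the removed code amounted to invoking pylint with an explicit rcfile rather than relying on pylint's own upward search for a .pylintrc. Roughly (paths illustrative; the flags are real pylint options from the deleted lines):

    import subprocess

    # Exit code reflects the messages pylint emitted, so use call, not check_call.
    subprocess.call([
        "pylint", "--rcfile=buildscripts/.pylintrc",
        "--output-format=msvs", "--reports=n", "buildscripts/somefile.py",
    ])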
@ -88,7 +88,11 @@ def _find_linter(linter, config_dict):

if linter.ignore_interpreter():
# Some linters use a different interpreter than the current interpreter.
cmd_str = os.path.join('/opt/mongodbtoolchain/v2/bin', linter.cmd_name)
# If the linter cmd_location is specified then use that location.
if linter.cmd_location:
cmd_str = linter.cmd_location
else:
cmd_str = os.path.join('/opt/mongodbtoolchain/v2/bin', linter.cmd_name)
cmd = [cmd_str]
else:
cmd = [sys.executable, cmd_str]
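The hunk above adds one rung to the executable lookup: an explicit cmd_location wins, and the toolchain directory stays as the fallback. Reduced to a standalone sketch (the toolchain path comes from the diff; the linter argument is assumed to carry the cmd_name/cmd_location attributes shown earlier):

    import os

    def resolve_cmd(linter):
        # Prefer an explicitly configured location over the toolchain default.
        if linter.cmd_location:
            return linter.cmd_location
        return os.path.join('/opt/mongodbtoolchain/v2/bin', linter.cmd_name)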
@ -1,5 +1,5 @@
#!/usr/bin/env python
'''Helper script for constructing an archive (zip or tar) from a list of files.
"""Helper script for constructing an archive (zip or tar) from a list of files.

The output format (tar, tgz, zip) is determined from the file name, unless the user specifies
--format on the command line.
@ -10,8 +10,8 @@ directory in the archive, perhaps mongodb-2.0.2/src/mongo.

Usage:

make_archive.py -o <output-file> [--format (tar|tgz|zip)] \
[--transform match1=replacement1 [--transform match2=replacement2 [...]]] \
make_archive.py -o <output-file> [--format (tar|tgz|zip)] \\
[--transform match1=replacement1 [--transform match2=replacement2 [...]]] \\
<input file 1> [...]

If the input file names start with "@", the file is expected to contain a list of
@ -23,7 +23,7 @@ match1, it is never compared against match2 or later. Matches are just python s
comparisons.

For a detailed usage example, see src/SConscript.client or src/mongo/SConscript.
'''
"""

import optparse
import os
@ -36,6 +36,7 @@ from subprocess import (Popen, PIPE, STDOUT)


def main(argv):
"""Execute Main program."""
args = []
for arg in argv[1:]:
if arg.startswith("@"):
@ -49,23 +50,24 @@ def main(argv):
opts = parse_options(args)
if opts.archive_format in ('tar', 'tgz'):
make_tar_archive(opts)
elif opts.archive_format in ('zip'):
elif opts.archive_format == 'zip':
make_zip_archive(opts)
else:
raise ValueError('Unsupported archive format "%s"' % opts.archive_format)


def delete_directory(dir):
'''Recursively deletes a directory and its contents.
'''
def delete_directory(directory):
"""Recursively deletes a directory and its contents."""
try:
shutil.rmtree(dir)
except Exception:
shutil.rmtree(directory)
except Exception: # pylint: disable=broad-except
pass


def make_tar_archive(opts):
'''Given the parsed options, generates the 'opt.output_filename'
"""Generate tar archive.

Given the parsed options, generates the 'opt.output_filename'
tarball containing all the files in 'opt.input_filename' renamed
according to the mappings in 'opts.transformations'.

@ -77,9 +79,9 @@ def make_tar_archive(opts):
required by 'opts.transformations'. Once the tarball has been
created, all temporary directory structures created for the
purposes of compressing, are removed.
'''
"""
tar_options = "cvf"
if opts.archive_format is 'tgz':
if opts.archive_format == 'tgz':
tar_options += "z"

# clean and create a temp directory to copy files to
@ -112,13 +114,15 @@ def make_tar_archive(opts):


def make_zip_archive(opts):
'''Given the parsed options, generates the 'opt.output_filename'
"""Generate the zip archive.

Given the parsed options, generates the 'opt.output_filename'
zipfile containing all the files in 'opt.input_filename' renamed
according to the mappings in 'opts.transformations'.

All files in 'opt.output_filename' are renamed before being
written into the zipfile.
'''
"""
archive = open_zip_archive_for_write(opts.output_filename)
try:
for input_filename in opts.input_filenames:
@ -129,6 +133,7 @@ def make_zip_archive(opts):


def parse_options(args):
"""Parse program options."""
parser = optparse.OptionParser()
parser.add_option('-o', dest='output_filename', default=None,
help='Name of the archive to output.', metavar='FILE')
@ -166,30 +171,34 @@ def parse_options(args):
xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=', 1)
for xform in opts.transformations
]
except Exception, e:
parser.error(e)
except Exception, err: # pylint: disable=broad-except
parser.error(err)

return opts


def open_zip_archive_for_write(filename):
'''Open a zip archive for writing and return it.
'''
"""Open a zip archive for writing and return it."""

# Infuriatingly, Zipfile calls the "add" method "write", but they're otherwise identical,
# for our purposes. WrappedZipFile is a minimal adapter class.
class WrappedZipFile(zipfile.ZipFile):
"""WrappedZipFile class."""

def add(self, filename, arcname):
"""Add filename to zip."""
return self.write(filename, arcname)

return WrappedZipFile(filename, 'w', zipfile.ZIP_DEFLATED)


def get_preferred_filename(input_filename, transformations):
'''Does a prefix substitution on 'input_filename' for the
"""Return preferred filename.

Perform a prefix substitution on 'input_filename' for the
first matching transformation in 'transformations' and
returns the substituted string
'''
returns the substituted string.
"""
for match, replace in transformations:
match_lower = match.lower()
input_filename_lower = input_filename.lower()
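The transformation rules here are first-match, case-insensitive prefix substitutions. A self-contained sketch of that matching, mirroring the docstring rather than the script's exact code:

    def preferred_name(input_filename, transformations):
        lowered = input_filename.lower()
        for match, replace in transformations:
            if lowered.startswith(match.lower()):
                return replace + input_filename[len(match):]
        return input_filename

    print(preferred_name("src/mongo/db.cpp", [("src", "mongodb-2.0.2/src")]))
    # mongodb-2.0.2/src/mongo/db.cpp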
@ -1,16 +1,17 @@
# Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015.
# To build mongodb, you must use scons. You can use this project to navigate code during debugging.
#
# HOW TO USE
#
# First, you need a compile_commands.json file, to generate run the following command:
# scons compiledb
#
# Next, run the following command
# python buildscripts/make_vcxproj.py FILE_NAME
#
# where FILE_NAME is the name of the file to generate e.g., "mongod"
#
"""Generate vcxproj and vcxproj.filters files for browsing code in Visual Studio 2015.

To build mongodb, you must use scons. You can use this project to navigate code during debugging.

HOW TO USE

First, you need a compile_commands.json file, to generate run the following command:
scons compiledb

Next, run the following command
python buildscripts/make_vcxproj.py FILE_NAME

where FILE_NAME is the name of the file to generate e.g., "mongod"
"""

import json
import os
@ -35,7 +36,7 @@ VCXPROJ_FOOTER = r"""


def get_defines(args):
"""Parse a compiler argument list looking for defines"""
"""Parse a compiler argument list looking for defines."""
ret = set()
for arg in args:
if arg.startswith('/D'):
@ -44,7 +45,7 @@ def get_defines(args):


def get_includes(args):
"""Parse a compiler argument list looking for includes"""
"""Parse a compiler argument list looking for includes."""
ret = set()
for arg in args:
if arg.startswith('/I'):
@ -52,10 +53,11 @@ def get_includes(args):
return ret


class ProjFileGenerator(object):
"""Generate a .vcxproj and .vcxproj.filters file"""
class ProjFileGenerator(object): # pylint: disable=too-many-instance-attributes
"""Generate a .vcxproj and .vcxproj.filters file."""

def __init__(self, target):
"""Initialize ProjFileGenerator."""
# we handle DEBUG in the vcxproj header:
self.common_defines = set()
self.common_defines.add("DEBUG")
@ -84,8 +86,8 @@ class ProjFileGenerator(object):
self.vcxproj.write(header_str)

common_defines = self.all_defines
for c in self.compiles:
common_defines = common_defines.intersection(c['defines'])
for comp in self.compiles:
common_defines = common_defines.intersection(comp['defines'])

self.vcxproj.write("<!-- common_defines -->\n")
self.vcxproj.write("<ItemDefinitionGroup><ClCompile><PreprocessorDefinitions>" +
@ -95,7 +97,7 @@ class ProjFileGenerator(object):
self.vcxproj.write(" <ItemGroup>\n")
for command in self.compiles:
defines = command["defines"].difference(common_defines)
if len(defines) > 0:
if defines:
self.vcxproj.write(
" <ClCompile Include=\"" + command["file"] + "\"><PreprocessorDefinitions>" +
';'.join(defines) + ";%(PreprocessorDefinitions)" +
@ -118,12 +120,12 @@ class ProjFileGenerator(object):
self.filters.close()

def parse_line(self, line):
"""Parse a build line"""
"""Parse a build line."""
if line.startswith("cl"):
self.__parse_cl_line(line[3:])

def __parse_cl_line(self, line):
"""Parse a compiler line"""
"""Parse a compiler line."""
# Get the file we are compiling
file_name = re.search(r"/c ([\w\\.-]+) ", line).group(1)

@ -146,16 +148,17 @@ class ProjFileGenerator(object):

self.compiles.append({"file": file_name, "defines": file_defines})

def __is_header(self, name):
"""Is this a header file?"""
@staticmethod
def __is_header(name):
"""Return True if this is a header file."""
headers = [".h", ".hpp", ".hh", ".hxx"]
for header in headers:
if name.endswith(header):
return True
return False

def __write_filters(self):
"""Generate the vcxproj.filters file"""
def __write_filters(self): # pylint: disable=too-many-branches
"""Generate the vcxproj.filters file."""
# 1. get a list of directories for all the files
# 2. get all the headers in each of these dirs
# 3. Output these lists of files to vcxproj and vcxproj.headers
@ -191,7 +194,7 @@ class ProjFileGenerator(object):
for directory in dirs:
if os.path.exists(directory):
for file_name in os.listdir(directory):
if "SConstruct" == file_name or "SConscript" in file_name:
if file_name == "SConstruct" or "SConscript" in file_name:
scons_files.add(directory + "\\" + file_name)
scons_files.add("SConstruct")

@ -244,6 +247,7 @@ class ProjFileGenerator(object):


def main():
"""Execute Main program."""
if len(sys.argv) != 2:
print r"Usage: python buildscripts\make_vcxproj.py FILE_NAME"
return
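The defines handling above is plain set algebra: the project-wide preprocessor block gets the intersection of every compile's defines, and each file then lists only its difference from that common set. A tiny standalone illustration (define names hypothetical):

    compiles = [{"defines": {"DEBUG", "UNICODE", "FOO"}},
                {"defines": {"DEBUG", "UNICODE", "BAR"}}]
    common = set.intersection(*(comp["defines"] for comp in compiles))
    per_file = [comp["defines"] - common for comp in compiles]
    print(common)    # set with DEBUG and UNICODE
    print(per_file)  # [set with FOO, set with BAR]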
@ -26,7 +26,7 @@ MongoDB SConscript files do.
from __future__ import print_function

__all__ = ('discover_modules', 'discover_module_directories', 'configure_modules',
'register_module_test')
'register_module_test') # pylint: disable=undefined-all-variable

import imp
import inspect
@ -34,9 +34,9 @@ import os


def discover_modules(module_root, allowed_modules):
"""Scans module_root for subdirectories that look like MongoDB modules.
"""Scan module_root for subdirectories that look like MongoDB modules.

Returns a list of imported build.py module objects.
Return a list of imported build.py module objects.
"""
found_modules = []

@ -74,9 +74,9 @@ def discover_modules(module_root, allowed_modules):


def discover_module_directories(module_root, allowed_modules):
"""Scans module_root for subdirectories that look like MongoDB modules.
"""Scan module_root for subdirectories that look like MongoDB modules.

Returns a list of directory names.
Return a list of directory names.
"""
if not os.path.isdir(module_root):
return []
@ -105,23 +105,23 @@ def discover_module_directories(module_root, allowed_modules):


def configure_modules(modules, conf):
""" Run the configure() function in the build.py python modules for each module in "modules"
(as created by discover_modules).
"""Run the configure() function in the build.py python modules for each module in "modules".

The modules were created by discover_modules.

The configure() function should prepare the Mongo build system for building the module.
"""
for module in modules:
name = module.name
print("configuring module: %s" % (name))

root = os.path.dirname(module.__file__)
module.configure(conf, conf.env)


def get_module_sconscripts(modules):
"""Return all modules' sconscripts."""
sconscripts = []
for m in modules:
module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(m.__file__)))
for mod in modules:
module_dir_path = __get_src_relative_path(os.path.join(os.path.dirname(mod.__file__)))
sconscripts.append(os.path.join(module_dir_path, 'SConscript'))
return sconscripts

@ -142,8 +142,9 @@ def __get_src_relative_path(path):


def __get_module_path(module_frame_depth):
"""Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames
above this function, relative to the "src" directory.
"""Return the path to the MongoDB module whose build.py is executing "module_frame_depth" frames.

This is above this function, relative to the "src" directory.
"""
module_filename = inspect.stack()[module_frame_depth + 1][1]
return os.path.dirname(__get_src_relative_path(module_filename))
@ -185,8 +186,9 @@ def get_current_module_build_path():


def get_current_module_libdep_name(libdep_rel_path):
"""Return a $BUILD_DIR relative path to a "libdep_rel_path", where "libdep_rel_path"
is specified relative to the MongoDB module's build.py file.
"""Return a $BUILD_DIR relative path to a "libdep_rel_path".

The "libdep_rel_path" is relative to the MongoDB module's build.py file.

May only meaningfully be called from within build.py
"""
@ -24,9 +24,9 @@ import subprocess
import sys


def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hint=None):
"""Given a trace_doc in MongoDB stack dump format, returns a list of symbolized stack frames.
"""
def symbolize_frames( # pylint: disable=too-many-locals
trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hint=None):
"""Return a list of symbolized stack frames from a trace_doc in MongoDB stack dump format."""

if symbolizer_path is None:
symbolizer_path = os.environ.get("MONGOSYMB_SYMBOLIZER_PATH", "llvm-symbolizer")
@ -34,8 +34,9 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
dsym_hint = []

def make_base_addr_map(somap_list):
"""Makes a map from binary load address to description of library from the somap, which is
a list of dictionaries describing individual loaded libraries.
"""Return map from binary load address to description of library from the somap_list.

The somap_list is a list of dictionaries describing individual loaded libraries.
"""
return {so_entry["b"]: so_entry for so_entry in somap_list if so_entry.has_key("b")}

@ -71,9 +72,9 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
stderr=open("/dev/null"))

def extract_symbols(stdin):
"""Extracts symbol information from the output of llvm-symbolizer.
"""Extract symbol information from the output of llvm-symbolizer.

Returns a list of dictionaries, each of which has fn, file, column and line entries.
Return a list of dictionaries, each of which has fn, file, column and line entries.

The format of llvm-symbolizer output is that for every CODE line of input,
it outputs zero or more pairs of lines, and then a blank line. This way, if
@ -109,47 +110,58 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
return frames


class path_dbg_file_resolver(object):
class PathDbgFileResolver(object):
"""PathDbgFileResolver class."""

def __init__(self, bin_path_guess):
"""Initialize PathDbgFileResolver."""
self._bin_path_guess = bin_path_guess

def get_dbg_file(self, soinfo):
"""Return dbg file name."""
return soinfo.get("path", self._bin_path_guess)


class s3_buildid_dbg_file_resolver(object):
class S3BuildidDbgFileResolver(object):
"""S3BuildidDbgFileResolver class."""

def __init__(self, cache_dir, s3_bucket):
"""Initialize S3BuildidDbgFileResolver."""
self._cache_dir = cache_dir
self._s3_bucket = s3_bucket

def get_dbg_file(self, soinfo):
buildId = soinfo.get("buildId", None)
if buildId is None:
"""Return dbg file name."""
build_id = soinfo.get("buildId", None)
if build_id is None:
return None
buildId = buildId.lower()
buildIdPath = os.path.join(self._cache_dir, buildId + ".debug")
if not os.path.exists(buildIdPath):
build_id = build_id.lower()
build_id_path = os.path.join(self._cache_dir, build_id + ".debug")
if not os.path.exists(build_id_path):
try:
self._get_from_s3(buildId)
except:
self._get_from_s3(build_id)
except Exception: # pylint: disable=broad-except
ex = sys.exc_info()[0]
sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" % (buildId, ex))
sys.stderr.write("Failed to find debug symbols for %s in s3: %s\n" % (build_id, ex))
return None
if not os.path.exists(buildIdPath):
if not os.path.exists(build_id_path):
return None
return buildIdPath
return build_id_path

def _get_from_s3(self, buildId):
def _get_from_s3(self, build_id):
"""Download debug symbols from S3."""
subprocess.check_call(
['wget', 'https://s3.amazonaws.com/%s/%s.debug.gz' %
(self._s3_bucket, buildId)], cwd=self._cache_dir)
subprocess.check_call(['gunzip', buildId + ".debug.gz"], cwd=self._cache_dir)
['wget',
'https://s3.amazonaws.com/%s/%s.debug.gz' %
(self._s3_bucket, build_id)], cwd=self._cache_dir)
subprocess.check_call(['gunzip', build_id + ".debug.gz"], cwd=self._cache_dir)


def classic_output(frames, outfile, **kwargs):
def classic_output(frames, outfile, **kwargs): # pylint: disable=unused-argument
"""Provide classic output."""
for frame in frames:
symbinfo = frame["symbinfo"]
if len(symbinfo) > 0:
if symbinfo:
for sframe in symbinfo:
outfile.write(" %(file)s:%(line)s:%(column)s: %(fn)s\n" % sframe)
else:
@ -157,13 +169,14 @@ def classic_output(frames, outfile, **kwargs):


def main(argv):
"""Execute Main program."""
parser = optparse.OptionParser()
parser.add_option("--dsym-hint", action="append", dest="dsym_hint")
parser.add_option("--symbolizer-path", dest="symbolizer_path", default=None)
parser.add_option("--debug-file-resolver", dest="debug_file_resolver", default="path")
parser.add_option("--output-format", dest="output_format", default="classic")
(options, args) = parser.parse_args(argv)
resolver_constructor = dict(path=path_dbg_file_resolver, s3=s3_buildid_dbg_file_resolver).get(
resolver_constructor = dict(path=PathDbgFileResolver, s3=S3BuildidDbgFileResolver).get(
options.debug_file_resolver, None)
if resolver_constructor is None:
sys.stderr.write("Invalid debug-file-resolver argument: %s\n" % options.debug_file_resolver)
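Both resolver classes expose the same single-method surface (get_dbg_file), which is what lets main() pick a constructor out of a plain dict. A sketch of that dispatch, using the renamed classes from the hunk above (arguments illustrative):

    resolvers = {"path": PathDbgFileResolver, "s3": S3BuildidDbgFileResolver}
    resolver = resolvers["path"]("/usr/bin/mongod")
    print(resolver.get_dbg_file({"path": "/usr/lib/libfoo.so"}))  # /usr/lib/libfoo.so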
@ -1,17 +1,15 @@
"""Script to fix up our MSI files """
"""Script to fix up our MSI files."""

from __future__ import print_function

import argparse
import msilib
import shutil

parser = argparse.ArgumentParser(description='Trim MSI.')
parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')

args = parser.parse_args()
import msilib


def exec_delete(query):
def exec_delete(db, query):
"""Execute delete on db."""
view = db.OpenView(query)
view.Execute(None)

@ -20,7 +18,8 @@ def exec_delete(query):
view.Close()


def exec_update(query, column, value):
def exec_update(db, query, column, value):
"""Execute update on db."""
view = db.OpenView(query)
view.Execute(None)

@ -30,23 +29,38 @@ def exec_update(query, column, value):
view.Close()


print "Trimming MSI"
def main():
"""Execute Main program."""
parser = argparse.ArgumentParser(description='Trim MSI.')
parser.add_argument('file', type=argparse.FileType('r'), help='file to trim')
parser.add_argument('out', type=argparse.FileType('w'), help='file to output to')

db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
args = parser.parse_args()
print("Trimming MSI")

exec_delete(
"select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
)
exec_delete(
"select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'"
)
exec_delete(
"select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'"
)
exec_delete(
"select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
)
db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)

db.Commit()
exec_delete(
db,
"select * from ControlEvent WHERE Dialog_ = 'LicenseAgreementDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
)
exec_delete(
db,
"select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'LicenseAgreementDlg'"
)
exec_delete(
db,
"select * from ControlEvent WHERE Dialog_ = 'CustomizeDlg' AND Control_ = 'Next' AND Event = 'NewDialog' AND Argument = 'VerifyReadyDlg'"
)
exec_delete(
db,
"select * from ControlEvent WHERE Dialog_ = 'VerifyReadyDlg' AND Control_ = 'Back' AND Event = 'NewDialog' AND Argument = 'CustomizeDlg'"
)

shutil.copyfile(args.file.name, args.out.name)
db.Commit()

shutil.copyfile(args.file.name, args.out.name)


if __name__ == "__main__":
main()
@ -1,30 +1,33 @@
|
||||
#!/usr/bin/env python
|
||||
"""Packager module.
|
||||
|
||||
# This program makes Debian and RPM repositories for MongoDB, by
|
||||
# downloading our tarballs of statically linked executables and
|
||||
# insinuating them into Linux packages. It must be run on a
|
||||
# Debianoid, since Debian provides tools to make RPMs, but RPM-based
|
||||
# systems don't provide debian packaging crud.
|
||||
This program makes Debian and RPM repositories for MongoDB, by
|
||||
downloading our tarballs of statically linked executables and
|
||||
insinuating them into Linux packages. It must be run on a
|
||||
Debianoid, since Debian provides tools to make RPMs, but RPM-based
|
||||
systems don't provide debian packaging crud.
|
||||
|
||||
# Notes:
|
||||
#
|
||||
# * Almost anything that you want to be able to influence about how a
|
||||
# package construction must be embedded in some file that the
|
||||
# packaging tool uses for input (e.g., debian/rules, debian/control,
|
||||
# debian/changelog; or the RPM specfile), and the precise details are
|
||||
# arbitrary and silly. So this program generates all the relevant
|
||||
# inputs to the packaging tools.
|
||||
#
|
||||
# * Once a .deb or .rpm package is made, there's a separate layer of
|
||||
# tools that makes a "repository" for use by the apt/yum layers of
|
||||
# package tools. The layouts of these repositories are arbitrary and
|
||||
# silly, too.
|
||||
#
|
||||
# * Before you run the program on a new host, these are the
|
||||
# prerequisites:
|
||||
#
|
||||
# apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core
|
||||
# echo "Now put the dist gnupg signing keys in ~root/.gnupg"
|
||||
Notes
|
||||
-----
|
||||
* Almost anything that you want to be able to influence about how a
|
||||
package construction must be embedded in some file that the
|
||||
packaging tool uses for input (e.g., debian/rules, debian/control,
|
||||
debian/changelog; or the RPM specfile), and the precise details are
|
||||
arbitrary and silly. So this program generates all the relevant
|
||||
inputs to the packaging tools.
|
||||
|
||||
* Once a .deb or .rpm package is made, there's a separate layer of
|
||||
tools that makes a "repository" for use by the apt/yum layers of
|
||||
package tools. The layouts of these repositories are arbitrary and
|
||||
silly, too.
|
||||
|
||||
* Before you run the program on a new host, these are the
|
||||
prerequisites:
|
||||
|
||||
apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core
|
||||
echo "Now put the dist gnupg signing keys in ~root/.gnupg"
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
@ -45,7 +48,10 @@ DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon", "amazon2"]
|
||||
|
||||
|
||||
class Spec(object):
|
||||
"""Spec class."""
|
||||
|
||||
def __init__(self, ver, gitspec=None, rel=None):
|
||||
"""Initialize Spec."""
|
||||
self.ver = ver
|
||||
self.gitspec = gitspec
|
||||
self.rel = rel
|
||||
@ -54,44 +60,53 @@ class Spec(object):
|
||||
# Patch builds version numbers are in the form: 3.5.5-64-g03945fa-patch-58debcdb3ff1223c9d00005b
|
||||
#
|
||||
def is_nightly(self):
|
||||
"""Return True if nightly."""
|
||||
return bool(re.search("-$", self.version())) or bool(
|
||||
re.search("\d-\d+-g[0-9a-f]+$", self.version()))
|
||||
re.search(r"\d-\d+-g[0-9a-f]+$", self.version()))
|
||||
|
||||
def is_patch(self):
|
||||
return bool(re.search("\d-\d+-g[0-9a-f]+-patch-[0-9a-f]+$", self.version()))
|
||||
"""Return True if patch."""
|
||||
return bool(re.search(r"\d-\d+-g[0-9a-f]+-patch-[0-9a-f]+$", self.version()))
|
||||
|
||||
def is_rc(self):
|
||||
return bool(re.search("-rc\d+$", self.version()))
|
||||
"""Return True if rc."""
|
||||
return bool(re.search(r"-rc\d+$", self.version()))
|
||||
|
||||
def is_pre_release(self):
|
||||
"""Return True if pre-release."""
|
||||
return self.is_rc() or self.is_nightly()
|
||||
|
||||
def version(self):
|
||||
"""Return version."""
|
||||
return self.ver
|
||||
|
||||
def patch_id(self):
|
||||
"""Return patch id."""
|
||||
if self.is_patch():
|
||||
return re.sub(r'.*-([0-9a-f]+$)', r'\1', self.version())
|
||||
else:
|
||||
return "none"
|
||||
return "none"
|
||||
|
||||
def metadata_gitspec(self):
|
||||
"""Git revision to use for spec+control+init+manpage files.
|
||||
The default is the release tag for the version being packaged."""
|
||||
if (self.gitspec):
|
||||
|
||||
The default is the release tag for the version being packaged.
|
||||
"""
|
||||
if self.gitspec:
|
||||
return self.gitspec
|
||||
else:
|
||||
return 'r' + self.version()
|
||||
return 'r' + self.version()
|
||||
|
||||
def version_better_than(self, version_string):
|
||||
"""Return True if 'version_string' is greater than instance version."""
|
||||
# FIXME: this is wrong, but I'm in a hurry.
|
||||
# e.g., "1.8.2" < "1.8.10", "1.8.2" < "1.8.2-rc1"
|
||||
return self.ver > version_string
|
||||
|
||||
def suffix(self):
|
||||
"""Return suffix."""
|
||||
return "-org" if int(self.ver.split(".")[1]) % 2 == 0 else "-org-unstable"
|
||||
|
||||
def prelease(self):
|
||||
"""Return pre-release verison suffix."""
|
||||
# NOTE: This is only called for RPM packages, and only after
|
||||
# pversion() below has been called. If you want to change this format
|
||||
# and want DEB packages to match, make sure to update pversion()
|
||||
@ -114,10 +129,10 @@ class Spec(object):
|
||||
return "0.%s.latest" % (corenum)
|
||||
elif self.is_patch():
|
||||
return "0.%s.patch.%s" % (corenum, self.patch_id())
|
||||
else:
|
||||
return str(corenum)
|
||||
return str(corenum)
|
||||
|
||||
def pversion(self, distro):
|
||||
"""Return the pversion."""
|
||||
# Note: Debian packages have funny rules about dashes in
|
||||
# version numbers, and RPM simply forbids dashes. pversion
|
||||
# will be the package's version number (but we need to know
|
||||
@ -146,27 +161,36 @@ class Spec(object):
|
||||
|
||||
def branch(self):
|
||||
"""Return the major and minor portions of the specified version.
|
||||
|
||||
For example, if the version is "2.5.5" the branch would be "2.5"
|
||||
"""
|
||||
return ".".join(self.ver.split(".")[0:2])
|
||||
|
||||
|
||||
class Distro(object):
|
||||
"""Distro class."""
|
||||
|
||||
def __init__(self, string):
|
||||
self.n = string
|
||||
"""Initialize Distro."""
|
||||
self.dname = string
|
||||
|
||||
def name(self):
|
||||
return self.n
|
||||
"""Return name."""
|
||||
return self.dname
|
||||
|
||||
def pkgbase(self):
|
||||
@staticmethod
|
||||
def pkgbase():
|
||||
"""Return pkgbase."""
|
||||
return "mongodb"
|
||||
|
||||
def archname(self, arch):
|
||||
"""Return the packaging system's architecture name.
|
||||
|
||||
Power and x86 have different names for apt/yum (ppc64le/ppc64el
|
||||
and x86_64/amd64)
|
||||
and x86_64/amd64).
|
||||
"""
|
||||
if re.search("^(debian|ubuntu)", self.n):
|
||||
# pylint: disable=too-many-return-statements
|
||||
if re.search("^(debian|ubuntu)", self.dname):
|
||||
if arch == "ppc64le":
|
||||
return "ppc64el"
|
||||
elif arch == "s390x":
|
||||
@ -175,23 +199,23 @@ class Distro(object):
|
||||
return "arm64"
|
||||
elif arch.endswith("86"):
|
||||
return "i386"
|
||||
else:
|
||||
return "amd64"
|
||||
elif re.search("^(suse|centos|redhat|fedora|amazon)", self.n):
|
||||
return "amd64"
|
||||
elif re.search("^(suse|centos|redhat|fedora|amazon)", self.dname):
|
||||
if arch == "ppc64le":
|
||||
return "ppc64le"
|
||||
elif arch == "s390x":
|
||||
return "s390x"
|
||||
elif arch.endswith("86"):
|
||||
return "i686"
|
||||
else:
|
||||
return "x86_64"
|
||||
return "x86_64"
|
||||
else:
|
||||
raise Exception("BUG: unsupported platform?")
|
||||
# pylint: enable=too-many-return-statements
|
||||
|
||||
def repodir(self, arch, build_os, spec):
|
||||
"""Return the directory where we'll place the package files for
|
||||
(distro, distro_version) in that distro's preferred repository
|
||||
def repodir(self, arch, build_os, spec): # noqa: D406,D407,D412,D413
|
||||
"""Return the directory where we'll place the package files for (distro, distro_version).
|
||||
|
||||
This is in that distro's preferred repository
|
||||
layout (as distinct from where that distro's packaging building
|
||||
tools place the package files).
|
||||
|
||||
@ -211,7 +235,6 @@ class Distro(object):
|
||||
|
||||
repo/zypper/suse/11/mongodb-org/2.5/x86_64
|
||||
zypper/suse/11/mongodb-org/2.5/i386
|
||||
|
||||
"""
|
||||
|
||||
repo_directory = ""
|
||||
@ -221,16 +244,16 @@ class Distro(object):
|
||||
else:
|
||||
repo_directory = spec.branch()
|
||||
|
||||
if re.search("^(debian|ubuntu)", self.n):
|
||||
if re.search("^(debian|ubuntu)", self.dname):
|
||||
return "repo/apt/%s/dists/%s/mongodb-org/%s/%s/binary-%s/" % (
|
||||
self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
|
||||
self.dname, self.repo_os_version(build_os), repo_directory, self.repo_component(),
|
||||
self.archname(arch))
|
||||
elif re.search("(redhat|fedora|centos|amazon)", self.n):
|
||||
return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
|
||||
elif re.search("(redhat|fedora|centos|amazon)", self.dname):
|
||||
return "repo/yum/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
|
||||
self.repo_os_version(build_os),
|
||||
repo_directory, self.archname(arch))
|
||||
elif re.search("(suse)", self.n):
|
||||
return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.n,
|
||||
elif re.search("(suse)", self.dname):
|
||||
return "repo/zypper/%s/%s/mongodb-org/%s/%s/RPMS/" % (self.dname,
|
||||
self.repo_os_version(build_os),
|
||||
repo_directory,
|
||||
self.archname(arch))
|
||||
@ -238,28 +261,33 @@ class Distro(object):
|
||||
raise Exception("BUG: unsupported platform?")
|
||||
|
||||
def repo_component(self):
|
||||
"""Return the name of the section/component/pool we are publishing into -
|
||||
e.g. "multiverse" for Ubuntu, "main" for debian."""
|
||||
if self.n == 'ubuntu':
|
||||
"""Return the name of the section/component/pool we are publishing into.
|
||||
|
||||
Example, "multiverse" for Ubuntu, "main" for debian.
|
||||
"""
|
||||
if self.dname == 'ubuntu':
|
||||
return "multiverse"
|
||||
elif self.n == 'debian':
|
||||
elif self.dname == 'debian':
|
||||
return "main"
|
||||
else:
|
||||
raise Exception("unsupported distro: %s" % self.n)
|
||||
raise Exception("unsupported distro: %s" % self.dname)
|
||||
|
||||
def repo_os_version(self, build_os):
|
||||
"""Return an OS version suitable for package repo directory
|
||||
naming - e.g. 5, 6 or 7 for redhat/centos, "precise," "wheezy," etc.
|
||||
for Ubuntu/Debian, 11 for suse, "2013.03" for amazon"""
|
||||
if self.n == 'suse':
|
||||
def repo_os_version(self, build_os): # pylint: disable=too-many-branches
|
||||
"""Return an OS version suitable for package repo directory naming.
|
||||
|
||||
Example, 5, 6 or 7 for redhat/centos, "precise," "wheezy," etc.
|
||||
for Ubuntu/Debian, 11 for suse, "2013.03" for amazon.
|
||||
"""
|
||||
# pylint: disable=too-many-return-statements
|
||||
if self.dname == 'suse':
|
||||
return re.sub(r'^suse(\d+)$', r'\1', build_os)
|
||||
if self.n == 'redhat':
|
||||
if self.dname == 'redhat':
|
||||
return re.sub(r'^rhel(\d).*$', r'\1', build_os)
|
||||
if self.n == 'amazon':
|
||||
if self.dname == 'amazon':
|
||||
return "2013.03"
|
||||
elif self.n == 'amazon2':
|
||||
elif self.dname == 'amazon2':
|
||||
return "2017.12"
|
||||
elif self.n == 'ubuntu':
|
||||
elif self.dname == 'ubuntu':
|
||||
if build_os == 'ubuntu1204':
|
||||
return "precise"
|
||||
elif build_os == 'ubuntu1404':
|
||||
@ -268,7 +296,7 @@ class Distro(object):
|
||||
return "xenial"
|
||||
else:
|
||||
raise Exception("unsupported build_os: %s" % build_os)
|
||||
elif self.n == 'debian':
|
||||
elif self.dname == 'debian':
|
||||
if build_os == 'debian71':
|
||||
return 'wheezy'
|
||||
elif build_os == 'debian81':
|
||||
@ -278,53 +306,62 @@ class Distro(object):
|
||||
else:
|
||||
raise Exception("unsupported build_os: %s" % build_os)
|
||||
else:
|
||||
raise Exception("unsupported distro: %s" % self.n)
|
||||
raise Exception("unsupported distro: %s" % self.dname)
|
||||
# pylint: enable=too-many-return-statements
|
||||
|
||||
def make_pkg(self, build_os, arch, spec, srcdir):
|
||||
if re.search("^(debian|ubuntu)", self.n):
|
||||
"""Return the package."""
|
||||
if re.search("^(debian|ubuntu)", self.dname):
|
||||
return make_deb(self, build_os, arch, spec, srcdir)
|
||||
elif re.search("^(suse|centos|redhat|fedora|amazon)", self.n):
|
||||
elif re.search("^(suse|centos|redhat|fedora|amazon)", self.dname):
|
||||
return make_rpm(self, build_os, arch, spec, srcdir)
|
||||
else:
|
||||
raise Exception("BUG: unsupported platform?")
|
||||
|
||||
def build_os(self, arch):
|
||||
"""Return the build os label in the binary package to download (e.g. "rhel55" for redhat,
|
||||
"ubuntu1204" for ubuntu, "debian71" for debian, "suse11" for suse, etc.)"""
|
||||
"""Return the build os label in the binary package to download.
|
||||
|
||||
Example, "rhel55" for redhat, "ubuntu1204" for ubuntu, "debian71" for debian,
|
||||
"suse11" for suse, etc.
|
||||
"""
|
||||
# Community builds only support amd64
|
||||
if arch not in ['x86_64', 'ppc64le', 's390x', 'arm64']:
|
||||
raise Exception("BUG: unsupported architecture (%s)" % arch)
|
||||
|
||||
if re.search("(suse)", self.n):
|
||||
if re.search("(suse)", self.dname):
|
||||
return ["suse11", "suse12"]
|
||||
elif re.search("(redhat|fedora|centos)", self.n):
|
||||
elif re.search("(redhat|fedora|centos)", self.dname):
|
||||
return ["rhel70", "rhel71", "rhel72", "rhel62", "rhel55"]
|
||||
elif self.n in ['amazon', 'amazon2']:
|
||||
return [self.n]
|
||||
elif self.n == 'ubuntu':
|
||||
elif self.dname in ['amazon', 'amazon2']:
|
||||
return [self.dname]
|
||||
elif self.dname == 'ubuntu':
|
||||
return [
|
||||
"ubuntu1204",
|
||||
"ubuntu1404",
|
||||
"ubuntu1604",
|
||||
]
|
||||
elif self.n == 'debian':
|
||||
elif self.dname == 'debian':
|
||||
return ["debian71", "debian81", "debian92"]
|
||||
else:
|
||||
raise Exception("BUG: unsupported platform?")
|
||||
|
||||
def release_dist(self, build_os):
|
||||
"""Return the release distribution to use in the rpm - "el5" for rhel 5.x,
|
||||
"el6" for rhel 6.x, return anything else unchanged"""
|
||||
"""Return the release distribution to use in the rpm.
|
||||
|
||||
if self.n == 'amazon':
|
||||
"el5" for rhel 5.x,
|
||||
"el6" for rhel 6.x,
|
||||
return anything else unchanged.
|
||||
"""
|
||||
|
||||
if self.dname == 'amazon':
|
||||
return 'amzn1'
|
||||
elif self.n == 'amazon2':
|
||||
elif self.dname == 'amazon2':
|
||||
return 'amzn2'
|
||||
else:
|
||||
return re.sub(r'^rh(el\d).*$', r'\1', build_os)
|
||||
return re.sub(r'^rh(el\d).*$', r'\1', build_os)
|
||||
|
||||
|
||||
def get_args(distros, arch_choices):
|
||||
"""Return the program arguments."""
|
||||
|
||||
distro_choices = []
|
||||
for distro in distros:
|
||||
@ -354,7 +391,8 @@ def get_args(distros, arch_choices):
|
||||
return args
|
||||
|
||||
|
||||
def main(argv):
|
||||
def main():
|
||||
"""Execute Main program."""
|
||||
|
||||
distros = [Distro(distro) for distro in DISTROS]
|
||||
|
||||
@ -386,26 +424,25 @@ def main(argv):
|
||||
shutil.copyfile(args.tarball, filename)
|
||||
|
||||
repo = make_package(distro, build_os, arch, spec, srcdir)
|
||||
make_repo(repo, distro, build_os, spec)
|
||||
make_repo(repo, distro, build_os)
|
||||
|
||||
finally:
|
||||
os.chdir(oldcwd)
|
||||
|
||||
|
||||
def crossproduct(*seqs):
"""A generator for iterating all the tuples consisting of elements
of seqs."""
l = len(seqs)
if l == 0:
"""Provide a generator for iterating all the tuples consisting of elements of seqs."""
num_seqs = len(seqs)
if num_seqs == 0:
pass
elif l == 1:
for i in seqs[0]:
yield [i]
elif num_seqs == 1:
for idx in seqs[0]:
yield [idx]
else:
for lst in crossproduct(*seqs[:-1]):
for i in seqs[-1]:
for idx in seqs[-1]:
lst2 = list(lst)
lst2.append(i)
lst2.append(idx)
yield lst2

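A side note on crossproduct(): the standard library's itertools.product yields the same combinations as tuples rather than lists, so the hand-rolled recursion above is mostly there to produce list output. A minimal sketch of the correspondence (the argument values are illustrative only):

import itertools

def crossproduct_via_itertools(*seqs):
    # Mirror crossproduct()'s list-of-lists output on top of itertools.product.
    # (itertools.product() with no arguments yields one empty tuple, whereas
    # crossproduct() yields nothing, so the zero-argument edge case differs.)
    for combo in itertools.product(*seqs):
        yield list(combo)

# Both yield ["deb", "x86_64"], ["deb", "arm64"], ["rpm", "x86_64"], ["rpm", "arm64"].
for pair in crossproduct_via_itertools(["deb", "rpm"], ["x86_64", "arm64"]):
    print(pair)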
@ -414,7 +451,7 @@ def sysassert(argv):
print "In %s, running %s" % (os.getcwd(), " ".join(argv))
sys.stdout.flush()
sys.stderr.flush()
assert (subprocess.Popen(argv).wait() == 0)
assert subprocess.Popen(argv).wait() == 0


def backtick(argv):
@ -426,12 +463,12 @@ def backtick(argv):


def tarfile(build_os, arch, spec):
"""Return the location where we store the downloaded tarball for
this package"""
"""Return the location where we store the downloaded tarball for this package."""
return "dl/mongodb-linux-%s-%s-%s.tar.gz" % (spec.version(), build_os, arch)


def setupdir(distro, build_os, arch, spec):
"""Return the setup directory name."""
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
# scripts, etc), along with the already-built binaries). In case
@ -466,9 +503,10 @@ def unpack_binaries_into(build_os, arch, spec, where):


def make_package(distro, build_os, arch, spec, srcdir):
"""Construct the package for (arch, distro, spec), getting
packaging files from srcdir and any user-specified suffix from
suffixes"""
"""Construct the package for (arch, distro, spec).

Get the packaging files from srcdir and any user-specified suffix from suffixes.
"""

sdir = setupdir(distro, build_os, arch, spec)
ensure_dir(sdir)
@ -494,9 +532,10 @@ def make_package(distro, build_os, arch, spec, srcdir):
return distro.make_pkg(build_os, arch, spec, srcdir)


def make_repo(repodir, distro, build_os, spec):
def make_repo(repodir, distro, build_os):
"""Make the repo."""
if re.search("(debian|ubuntu)", repodir):
make_deb_repo(repodir, distro, build_os, spec)
make_deb_repo(repodir, distro, build_os)
elif re.search("(suse|centos|redhat|fedora|amazon)", repodir):
make_rpm_repo(repodir)
else:
@ -504,6 +543,7 @@ def make_repo(repodir, distro, build_os, spec):


def make_deb(distro, build_os, arch, spec, srcdir):
"""Make the Debian script."""
# I can't remember the details anymore, but the initscript/upstart
# job files' names must match the package name in some way; and
# see also the --name flag to dh_installinit in the generated
@ -559,15 +599,16 @@ def make_deb(distro, build_os, arch, spec, srcdir):
sysassert(["dpkg-buildpackage", "-uc", "-us", "-a" + distro_arch])
finally:
os.chdir(oldcwd)
r = distro.repodir(arch, build_os, spec)
ensure_dir(r)
repo_dir = distro.repodir(arch, build_os, spec)
ensure_dir(repo_dir)
# FIXME: see if shutil.copyfile or something can do this without
# much pain.
sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\"" % (sdir, r)])
return r
sysassert(["sh", "-c", "cp -v \"%s/../\"*.deb \"%s\"" % (sdir, repo_dir)])
return repo_dir


def make_deb_repo(repo, distro, build_os, spec):
def make_deb_repo(repo, distro, build_os):
"""Make the Debian repo."""
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
oldpwd = os.getcwd()
@ -575,19 +616,19 @@ def make_deb_repo(repo, distro, build_os, spec):
try:
dirs = set(
[os.path.dirname(deb)[2:] for deb in backtick(["find", ".", "-name", "*.deb"]).split()])
for d in dirs:
s = backtick(["dpkg-scanpackages", d, "/dev/null"])
with open(d + "/Packages", "w") as f:
f.write(s)
b = backtick(["gzip", "-9c", d + "/Packages"])
with open(d + "/Packages.gz", "wb") as f:
f.write(b)
for directory in dirs:
st = backtick(["dpkg-scanpackages", directory, "/dev/null"])
with open(directory + "/Packages", "w") as fh:
fh.write(st)
bt = backtick(["gzip", "-9c", directory + "/Packages"])
with open(directory + "/Packages.gz", "wb") as fh:
fh.write(bt)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
s = """Origin: mongodb
s1 = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-org
@ -603,14 +644,15 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = backtick(["apt-ftparchive", "release", "."])
try:
with open("Release", 'w') as f:
f.write(s)
f.write(s2)
with open("Release", 'w') as fh:
fh.write(s1)
fh.write(s2)
finally:
os.chdir(oldpwd)


def move_repos_into_place(src, dst):
def move_repos_into_place(src, dst):  # pylint: disable=too-many-branches
"""Move the repos into place."""
# Find all the stuff in src/*, move it to a freshly-created
# directory beside dst, then play some games with symlinks so that
# dst names the new stuff and dst+".old" names the previous
@ -633,8 +675,8 @@ def move_repos_into_place(src, dst):
i = i + 1

# Put the stuff in our new directory.
for r in os.listdir(src):
sysassert(["cp", "-rv", src + "/" + r, dname])
for src_file in os.listdir(src):
sysassert(["cp", "-rv", src + "/" + src_file, dname])

# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
@ -675,30 +717,31 @@ def move_repos_into_place(src, dst):


def write_debian_changelog(path, spec, srcdir):
"""Write the debian changelog."""
oldcwd = os.getcwd()
os.chdir(srcdir)
preamble = ""
try:
s = preamble + backtick(
sb = preamble + backtick(
["sh", "-c",
"git archive %s debian/changelog | tar xOf -" % spec.metadata_gitspec()])
finally:
os.chdir(oldcwd)
lines = s.split("\n")
lines = sb.split("\n")
# If the first line starts with "mongodb", it's not a revision
# preamble, and so frob the version number.
lines[0] = re.sub("^mongodb \\(.*\\)", "mongodb (%s)" % (spec.pversion(Distro("debian"))),
lines[0])
# Rewrite every changelog entry starting in mongodb<space>
lines = [re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), l) for l in lines]
lines = [re.sub("^ --", " --", l) for l in lines]
s = "\n".join(lines)
with open(path, 'w') as f:
f.write(s)
lines = [re.sub("^mongodb ", "mongodb%s " % (spec.suffix()), line) for line in lines]
lines = [re.sub("^ --", " --", line) for line in lines]
sb = "\n".join(lines)
with open(path, 'w') as fh:
fh.write(sb)


def make_rpm(distro, build_os, arch, spec, srcdir):
# Create the specfile.
def make_rpm(distro, build_os, arch, spec, srcdir):  # pylint: disable=too-many-locals
"""Create the RPM specfile."""
suffix = spec.suffix()
sdir = setupdir(distro, build_os, arch, spec)

@ -757,7 +800,7 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
macropath = os.getcwd() + "/macros"

write_rpm_macros_file(macropath, topdir, distro.release_dist(build_os))
if len(macrofiles) > 0:
if macrofiles:
macrofiles = macrofiles[0] + ":" + macropath
rcfile = os.getcwd() + "/rpmrc"
write_rpmrc_file(rcfile, macrofiles)
@ -801,15 +844,16 @@ def make_rpm(distro, build_os, arch, spec, srcdir):
])
sysassert(["rpmbuild", "-ba", "--target", distro_arch] + flags +
["%s/SPECS/mongodb%s.spec" % (topdir, suffix)])
r = distro.repodir(arch, build_os, spec)
ensure_dir(r)
repo_dir = distro.repodir(arch, build_os, spec)
ensure_dir(repo_dir)
# FIXME: see if some combination of shutil.copy<hoohah> and glob
# can do this without shelling out.
sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\"" % (topdir, distro_arch, r)])
return r
sysassert(["sh", "-c", "cp -v \"%s/RPMS/%s/\"*.rpm \"%s\"" % (topdir, distro_arch, repo_dir)])
return repo_dir


def make_rpm_repo(repo):
"""Make the RPM repo."""
oldpwd = os.getcwd()
os.chdir(repo + "../")
try:
@ -819,20 +863,21 @@ def make_rpm_repo(repo):


def write_rpmrc_file(path, string):
with open(path, 'w') as f:
f.write(string)
"""Write the RPM rc file."""
with open(path, 'w') as fh:
fh.write(string)


def write_rpm_macros_file(path, topdir, release_dist):
with open(path, 'w') as f:
f.write("%%_topdir %s\n" % topdir)
f.write("%%dist .%s\n" % release_dist)
f.write("%_use_internal_dependency_generator 0\n")
"""Write the RPM macros file."""
with open(path, 'w') as fh:
fh.write("%%_topdir %s\n" % topdir)
fh.write("%%dist .%s\n" % release_dist)
fh.write("%_use_internal_dependency_generator 0\n")

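One detail worth noting in write_rpm_macros_file() above: the doubled "%%" is Python %-formatting escaping, not rpm macro syntax. The first two writes interpolate a value, so a literal percent sign must be written as "%%"; the last write does no interpolation, so a single "%" suffices. A quick illustration with a made-up topdir:

# "%%" collapses to a literal "%" during %-interpolation.
line = "%%_topdir %s\n" % "/home/build/rpmbuild"
assert line == "%_topdir /home/build/rpmbuild\n"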
def ensure_dir(filename):
"""Make sure that the directory that's the dirname part of
filename exists, and return filename."""
"""Ensure that the dirname directory of filename exists, and return filename."""
dirpart = os.path.dirname(filename)
try:
os.makedirs(dirpart)
@ -846,12 +891,12 @@ def ensure_dir(filename):


def is_valid_file(parser, filename):
"""Check if file exists, and return the filename"""
"""Check if file exists, and return the filename."""
if not os.path.exists(filename):
parser.error("The file %s does not exist!" % filename)
else:
return filename
return None
return filename


if __name__ == "__main__":
main(sys.argv)
main()

@ -1,4 +1,5 @@
#!/usr/bin/env python
"""Packager Enterprise module."""

# This program makes Debian and RPM repositories for MongoDB, by
# downloading our tarballs of statically linked executables and
@ -26,20 +27,16 @@
# apt-get install dpkg-dev rpm debhelper fakeroot ia32-libs createrepo git-core libsnmp15
# echo "Now put the dist gnupg signing keys in ~root/.gnupg"

import argparse
import errno
import getopt
from glob import glob
import packager
import os
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import time
import urlparse

import packager  # pylint: disable=relative-import

# The MongoDB names for the architectures we support.
ARCH_CHOICES = ["x86_64", "ppc64le", "s390x", "arm64"]
@ -49,20 +46,26 @@ DISTROS = ["suse", "debian", "redhat", "ubuntu", "amazon", "amazon2"]


class EnterpriseSpec(packager.Spec):
"""EnterpriseSpec class."""

def suffix(self):
"""Suffix."""
return "-enterprise" if int(self.ver.split(".")[1]) % 2 == 0 else "-enterprise-unstable"


class EnterpriseDistro(packager.Distro):
def repodir(self, arch, build_os, spec):
"""Return the directory where we'll place the package files for
(distro, distro_version) in that distro's preferred repository
"""EnterpriseDistro class."""

def repodir(self, arch, build_os, spec):  # noqa: D406,D407,D412,D413
"""Return the directory where we'll place the package files.

This is for (distro, distro_version) in that distro's preferred repository
layout (as distinct from where that distro's packaging building
tools place the package files).

Packages will go into repos corresponding to the major release
series (2.5, 2.6, 2.7, 2.8, etc.) except for RC's and nightlies
which will go into special separate "testing" directories
Packages will go into repos corresponding to the major release
series (2.5, 2.6, 2.7, 2.8, etc.) except for RC's and nightlies
which will go into special separate "testing" directories

Examples:

@ -86,7 +89,6 @@ class EnterpriseDistro(packager.Distro):

repo/zypper/suse/11/mongodb-enterprise/testing/x86_64
repo/zypper/suse/11/mongodb-enterprise/testing/i386

"""

repo_directory = ""
@ -96,52 +98,53 @@ class EnterpriseDistro(packager.Distro):
else:
repo_directory = spec.branch()

if re.search("^(debian|ubuntu)", self.n):
if re.search("^(debian|ubuntu)", self.dname):
return "repo/apt/%s/dists/%s/mongodb-enterprise/%s/%s/binary-%s/" % (
self.n, self.repo_os_version(build_os), repo_directory, self.repo_component(),
self.dname, self.repo_os_version(build_os), repo_directory, self.repo_component(),
self.archname(arch))
elif re.search("(redhat|fedora|centos|amazon)", self.n):
elif re.search("(redhat|fedora|centos|amazon)", self.dname):
return "repo/yum/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
elif re.search("(suse)", self.n):
self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
elif re.search("(suse)", self.dname):
return "repo/zypper/%s/%s/mongodb-enterprise/%s/%s/RPMS/" % (
self.n, self.repo_os_version(build_os), repo_directory, self.archname(arch))
self.dname, self.repo_os_version(build_os), repo_directory, self.archname(arch))
else:
raise Exception("BUG: unsupported platform?")

def build_os(self, arch):
"""Return the build os label in the binary package to download ("rhel57", "rhel62", "rhel67" and "rhel70"
for redhat, the others are delegated to the super class
def build_os(self, arch):  # pylint: disable=too-many-branches
"""Return the build os label in the binary package to download.

The labels "rhel57", "rhel62", "rhel67" and "rhel70" are for redhat,
the others are delegated to the super class.
"""
# pylint: disable=too-many-return-statements
if arch == "ppc64le":
if self.n == 'ubuntu':
if self.dname == 'ubuntu':
return ["ubuntu1604"]
if self.n == 'redhat':
if self.dname == 'redhat':
return ["rhel71"]
else:
return []
return []
if arch == "s390x":
if self.n == 'redhat':
if self.dname == 'redhat':
return ["rhel67", "rhel72"]
if self.n == 'suse':
if self.dname == 'suse':
return ["suse11", "suse12"]
if self.n == 'ubuntu':
if self.dname == 'ubuntu':
return ["ubuntu1604"]
else:
return []
return []
if arch == "arm64":
if self.n == 'ubuntu':
if self.dname == 'ubuntu':
return ["ubuntu1604"]
else:
return []
return []

if re.search("(redhat|fedora|centos)", self.n):
if re.search("(redhat|fedora|centos)", self.dname):
return ["rhel70", "rhel62", "rhel57"]
else:
return super(EnterpriseDistro, self).build_os(arch)
return super(EnterpriseDistro, self).build_os(arch)
# pylint: enable=too-many-return-statements


def main(argv):
def main():
"""Execute Main program."""

distros = [EnterpriseDistro(distro) for distro in DISTROS]

@ -175,7 +178,7 @@ def main(argv):
shutil.copyfile(args.tarball, filename)

repo = make_package(distro, build_os, arch, spec, srcdir)
make_repo(repo, distro, build_os, spec)
make_repo(repo, distro, build_os)

made_pkg = True

@ -187,15 +190,15 @@ def main(argv):


def tarfile(build_os, arch, spec):
"""Return the location where we store the downloaded tarball for
this package"""
"""Return the location where we store the downloaded tarball for this package."""
return "dl/mongodb-linux-%s-enterprise-%s-%s.tar.gz" % (spec.version(), build_os, arch)


def setupdir(distro, build_os, arch, spec):
"""Return the setup directory name."""
# The setupdir will be a directory containing all inputs to the
# distro's packaging tools (e.g., package metadata files, init
# scripts, etc), along with the already-built binaries). In case
# scripts, etc, along with the already-built binaries). In case
# the following format string is unclear, an example setupdir
# would be dst/x86_64/debian-sysvinit/wheezy/mongodb-org-unstable/
# or dst/x86_64/redhat/rhel57/mongodb-org-unstable/
@ -226,9 +229,10 @@ def unpack_binaries_into(build_os, arch, spec, where):


def make_package(distro, build_os, arch, spec, srcdir):
"""Construct the package for (arch, distro, spec), getting
packaging files from srcdir and any user-specified suffix from
suffixes"""
"""Construct the package for (arch, distro, spec).

Get the packaging files from srcdir and any user-specified suffix from suffixes.
"""

sdir = setupdir(distro, build_os, arch, spec)
packager.ensure_dir(sdir)
@ -254,16 +258,18 @@ def make_package(distro, build_os, arch, spec, srcdir):
return distro.make_pkg(build_os, arch, spec, srcdir)


def make_repo(repodir, distro, build_os, spec):
def make_repo(repodir, distro, build_os):
"""Make the repo."""
if re.search("(debian|ubuntu)", repodir):
make_deb_repo(repodir, distro, build_os, spec)
make_deb_repo(repodir, distro, build_os)
elif re.search("(suse|centos|redhat|fedora|amazon)", repodir):
packager.make_rpm_repo(repodir)
else:
raise Exception("BUG: unsupported platform?")

def make_deb_repo(repo, distro, build_os, spec):
def make_deb_repo(repo, distro, build_os):
"""Make the Debian repo."""
# Note: the Debian repository Packages files must be generated
# very carefully in order to be usable.
oldpwd = os.getcwd()
@ -273,19 +279,19 @@ def make_deb_repo(repo, distro, build_os, spec):
os.path.dirname(deb)[2:]
for deb in packager.backtick(["find", ".", "-name", "*.deb"]).split()
])
for d in dirs:
s = packager.backtick(["dpkg-scanpackages", d, "/dev/null"])
with open(d + "/Packages", "w") as f:
f.write(s)
b = packager.backtick(["gzip", "-9c", d + "/Packages"])
with open(d + "/Packages.gz", "wb") as f:
f.write(b)
for directory in dirs:
st = packager.backtick(["dpkg-scanpackages", directory, "/dev/null"])
with open(directory + "/Packages", "w") as fh:
fh.write(st)
bt = packager.backtick(["gzip", "-9c", directory + "/Packages"])
with open(directory + "/Packages.gz", "wb") as fh:
fh.write(bt)
finally:
os.chdir(oldpwd)
# Notes: the Release{,.gpg} files must live in a special place,
# and must be created after all the Packages.gz files have been
# done.
s = """Origin: mongodb
s1 = """Origin: mongodb
Label: mongodb
Suite: %s
Codename: %s/mongodb-enterprise
@ -301,24 +307,25 @@ Description: MongoDB packages
os.chdir(repo + "../../")
s2 = packager.backtick(["apt-ftparchive", "release", "."])
try:
with open("Release", 'w') as f:
f.write(s)
f.write(s2)
with open("Release", 'w') as fh:
fh.write(s1)
fh.write(s2)
finally:
os.chdir(oldpwd)

def move_repos_into_place(src, dst):
def move_repos_into_place(src, dst):  # pylint: disable=too-many-branches
"""Move the repos into place."""
# Find all the stuff in src/*, move it to a freshly-created
# directory beside dst, then play some games with symlinks so that
# dst names the new stuff and dst+".old" names the previous
# one. This feels like a lot of hooey for something so trivial.

# First, make a crispy fresh new directory to put the stuff in.
i = 0
idx = 0
while True:
date_suffix = time.strftime("%Y-%m-%d")
dname = dst + ".%s.%d" % (date_suffix, i)
dname = dst + ".%s.%d" % (date_suffix, idx)
try:
os.mkdir(dname)
break
@ -328,17 +335,17 @@ def move_repos_into_place(src, dst):
pass
else:
raise exc
i = i + 1
idx = idx + 1

# Put the stuff in our new directory.
for r in os.listdir(src):
packager.sysassert(["cp", "-rv", src + "/" + r, dname])
for src_file in os.listdir(src):
packager.sysassert(["cp", "-rv", src + "/" + src_file, dname])

# Make a symlink to the new directory; the symlink will be renamed
# to dst shortly.
i = 0
idx = 0
while True:
tmpnam = dst + ".TMP.%d" % i
tmpnam = dst + ".TMP.%d" % idx
try:
os.symlink(dname, tmpnam)
break
@ -348,15 +355,15 @@ def move_repos_into_place(src, dst):
pass
else:
raise exc
i = i + 1
idx = idx + 1

# Make a symlink to the old directory; this symlink will be
# renamed shortly, too.
oldnam = None
if os.path.exists(dst):
i = 0
idx = 0
while True:
oldnam = dst + ".old.%d" % i
oldnam = dst + ".old.%d" % idx
try:
os.symlink(os.readlink(dst), oldnam)
break
@ -373,4 +380,4 @@ def move_repos_into_place(src, dst):


if __name__ == "__main__":
main(sys.argv)
main()

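The symlink choreography in move_repos_into_place() exists so that readers of dst never see the path disappear: a new symlink is created under a temporary name and then renamed over dst, which os.rename() performs atomically on POSIX filesystems. A minimal sketch of just that swap, assuming dst already is a symlink as in the code above (names here are illustrative):

import os

def swap_symlink(target, link_name):
    """Atomically repoint link_name at target via a temporary symlink."""
    tmpnam = link_name + ".TMP"
    os.symlink(target, tmpnam)  # build the new symlink under a temporary name
    os.rename(tmpnam, link_name)  # rename() atomically replaces the old symlink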
@ -1,6 +1,5 @@
#!/usr/bin/env python
"""
Converts silent test failures into non-silent failures.
"""Convert silent test failures into non-silent failures.

For any test file with at least 2 executions in the report.json file that have a "silentfail" status,
this script will change the outputted report to have a "fail" status instead.
@ -22,11 +21,13 @@ if __name__ == "__main__" and __package__ is None:


def read_json_file(json_file):
"""Return contents of a JSON file."""
with open(json_file) as json_data:
return json.load(json_data)


def main():
"""Execute Main program."""

usage = "usage: %prog [options] report.json"
parser = optparse.OptionParser(usage=usage)

@ -1,26 +1,28 @@
#!/usr/bin/env python2
""" This program stamps the shared scons directory with a timestamp so we can
determine the last prune time and run the prune script on a schedule.
It is meant to be invoked from the shell:
"""Prune check program.

if python prune_check.py; then
echo 'Pruning'
else
echo 'Less than 24 hours, waiting ...'
fi
This program stamps the shared scons directory with a timestamp so we can
determine the last prune time and run the prune script on a schedule.
It is meant to be invoked from the shell:

The script can be invoked with optional arguments for mount point and 'seconds
since last prune' (default is 86400 - 24 hours). Use -h to see options and defaults.
if python prune_check.py; then
echo 'Pruning'
else
echo 'Less than 24 hours, waiting ...'
fi

python prune_check.py -m '/mount_point' -p 86400
The script can be invoked with optional arguments for mount point and 'seconds
since last prune' (default is 86400 - 24 hours). Use -h to see options and defaults.

To write the latest timestamp to a directory
python prune_check.py -m '/mount_point' -p 86400

python prune_check.py -w
To write the latest timestamp to a directory

If it is time to prune (ie. more than 24 hours since the last timestamp),
the script exits with a 0 return code.
Otherwise the script returns exit code 1.
python prune_check.py -w

If it is time to prune (ie. more than 24 hours since the last timestamp),
the script exits with a 0 return code.
Otherwise the script returns exit code 1.
"""

import argparse
@ -33,23 +35,23 @@ DATE_TIME_STR = "%Y-%m-%d %H:%M:%S"


def get_prune_file_path(mount_point):
""" Get the shared scons directory for this AMI """
with open('/etc/mongodb-build-system-id', 'r') as f:
uuid = f.read().strip()
"""Get the shared scons directory for this AMI."""
with open('/etc/mongodb-build-system-id', 'r') as fh:
uuid = fh.read().strip()
return os.path.join(mount_point, uuid, 'info', 'last_prune_time')


def write_last_prune_time(last_prune_time, prune_file_path):
""" Write the last prune timestamp in a 'last_prune_time' file """
with open(prune_file_path, 'w') as f:
f.write(last_prune_time.strftime(DATE_TIME_STR) + '\n')
"""Write the last prune timestamp in a 'last_prune_time' file."""
with open(prune_file_path, 'w') as fh:
fh.write(last_prune_time.strftime(DATE_TIME_STR) + '\n')


def retrieve_last_prune_time(prune_file_path):
""" Get the last prune time from the 'last_prune_time' file """
"""Get the last prune time from the 'last_prune_time' file."""
if os.path.isfile(prune_file_path):
with open(prune_file_path, 'r') as f:
last_prune_time_str = f.read().strip()
with open(prune_file_path, 'r') as fh:
last_prune_time_str = fh.read().strip()
last_prune_time = datetime.strptime(last_prune_time_str, DATE_TIME_STR)
else:
last_prune_time = datetime.utcnow()
@ -59,8 +61,9 @@ def retrieve_last_prune_time(prune_file_path):


def check_last_prune_time(args):
""" Returns exit code 0 if time to run again, else returns exit code 1
This is meant to be called from the shell
"""Return exit code 0 if time to run again, else return exit code 1.

This is meant to be called from the shell
"""

seconds_since_last_prune = args.prune_seconds
@ -87,7 +90,7 @@ def check_last_prune_time(args):


def get_command_line_args():
""" Get the command line arguments """
"""Get the command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('-m', '--mount_point', type=str, required=False,
help="The base mount where efs is mounted. Default is '/efs'",
@ -102,6 +105,7 @@ def get_command_line_args():


def main():
"""Execute Main program."""
args = get_command_line_args()
mount_point = args.mount_point

@ -48,7 +48,7 @@ def get_py_linter(linter_filter):

linter_candidates = [linter for linter in _LINTERS if linter.cmd_name in linter_list]

if len(linter_candidates) == 0:
if not linter_candidates:
raise ValueError("No linters found for filter '%s'" % (linter_filter))

return linter_candidates
@ -56,18 +56,12 @@ def get_py_linter(linter_filter):

def is_interesting_file(file_name):
# type: (str) -> bool
""""Return true if this file should be checked."""
"""Return true if this file should be checked."""
return file_name.endswith(".py") and (file_name.startswith("buildscripts/idl")
or file_name.startswith("buildscripts/linter")
or file_name.startswith("buildscripts/pylinters.py"))


def _get_build_dir():
# type: () -> str
"""Get the location of the scons' build directory in case we need to download clang-format."""
return os.path.join(git.get_base_dir(), "build")


def _lint_files(linters, config_dict, file_names):
# type: (str, Dict[str, str], List[str]) -> None
"""Lint a list of files with clang-format."""
@ -123,7 +117,7 @@ def _fix_files(linters, config_dict, file_names):
# Get a list of linters which return a valid command for get_fix_cmd()
fix_list = [fixer for fixer in linter_list if fixer.get_fix_cmd_args("ignore")]

if len(fix_list) == 0:
if not fix_list:
raise ValueError("Cannot find any linters '%s' that support fixing." % (linters))

lint_runner = runner.LintRunner()
@ -152,7 +146,7 @@ def fix_func(linters, config_dict, file_names):

def main():
# type: () -> None
"""Main entry point."""
"""Execute Main entry point."""

parser = argparse.ArgumentParser(description='PyLinter frontend.')

@ -21,7 +21,7 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
import subprocess  # type: ignore
else:
import subprocess

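The "# type: ignore" in the hunk above is the idiom this commit uses wherever a module is imported conditionally under the same name: mypy would otherwise flag the second import as a redefinition. A stripped-down sketch of the pattern, assuming the optional subprocess32 backport may be missing:

import os
import sys

if os.name == "posix" and sys.version_info[0] == 2:
    try:
        import subprocess32 as subprocess  # preferred backport on Python 2
    except ImportError:
        import subprocess  # type: ignore  # fallback; silences mypy's redefinition error
else:
    import subprocess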
@ -42,7 +42,7 @@ _SSH_CONNECTION_ERRORS = [


def posix_path(path):
""" Returns posix path, used on Windows since scp requires posix style paths. """
"""Return posix path, used on Windows since scp requires posix style paths."""
# If path is already quoted, we need to remove the quotes before calling
path_quote = "\'" if path.startswith("\'") else ""
path_quote = "\"" if path.startswith("\"") else path_quote
@ -54,11 +54,13 @@ def posix_path(path):
return "{quote}{path}{quote}".format(quote=path_quote, path=new_path)

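For context on the posix_path() hunk: only the quote-normalization prefix is visible here, with the conversion body elided. What the visible lines do is detect whether the incoming path is wrapped in single or double quotes, so that the same quoting can be re-applied around the converted path at the end. An illustrative walk-through (the path value is made up):

path = "'C:\\data\\db'"
path_quote = "\'" if path.startswith("\'") else ""
path_quote = "\"" if path.startswith("\"") else path_quote
# path_quote is now "'", and the return statement re-wraps the converted
# path in that same quote character.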
class RemoteOperations(object):
class RemoteOperations(object):  # pylint: disable=too-many-instance-attributes
"""Class to support remote operations."""

def __init__(self, user_host, ssh_connection_options=None, ssh_options=None, scp_options=None,
retries=0, retry_sleep=0, debug=False, shell_binary="/bin/bash", use_shell=False):
def __init__(  # pylint: disable=too-many-arguments
self, user_host, ssh_connection_options=None, ssh_options=None, scp_options=None,
retries=0, retry_sleep=0, debug=False, shell_binary="/bin/bash", use_shell=False):
"""Initialize RemoteOperations."""

self.user_host = user_host
self.ssh_connection_options = ssh_connection_options if ssh_connection_options else ""
@ -85,7 +87,7 @@ class RemoteOperations(object):
return process.poll(), buff_stdout

def _remote_access(self):
""" This will check if a remote session is possible. """
"""Check if a remote session is possible."""
cmd = "ssh {} {} {} date".format(self.ssh_connection_options, self.ssh_options,
self.user_host)
attempt_num = 0
@ -108,19 +110,20 @@ class RemoteOperations(object):
return self._call(cmd)

def access_established(self):
""" Returns True if initial access was established. """
"""Return True if initial access was established."""
return not self._access_code

def access_info(self):
""" Returns return code and output buffer from initial access attempt(s). """
"""Return the return code and output buffer from initial access attempt(s)."""
return self._access_code, self._access_buff

def operation(self, operation_type, operation_param, operation_dir=None):
""" Main entry for remote operations. Returns (code, output).
def operation(  # pylint: disable=too-many-branches
self, operation_type, operation_param, operation_dir=None):
"""Execute Main entry for remote operations. Returns (code, output).

'operation_type' supports remote shell and copy operations.
'operation_param' can either be a list or string of commands or files.
'operation_dir' is '.' if unspecified for 'copy_*'.
'operation_type' supports remote shell and copy operations.
'operation_param' can either be a list or string of commands or files.
'operation_dir' is '.' if unspecified for 'copy_*'.
"""

if not self.access_established():
@ -195,23 +198,23 @@ class RemoteOperations(object):
return final_ret, buff

def shell(self, operation_param, operation_dir=None):
""" Helper for remote shell operations. """
"""Provide helper for remote shell operations."""
return self.operation(operation_type="shell", operation_param=operation_param,
operation_dir=operation_dir)

def copy_to(self, operation_param, operation_dir=None):
""" Helper for remote copy_to operations. """
"""Provide helper for remote copy_to operations."""
return self.operation(operation_type="copy_to", operation_param=operation_param,
operation_dir=operation_dir)

def copy_from(self, operation_param, operation_dir=None):
""" Helper for remote copy_from operations. """
"""Provide helper for remote copy_from operations."""
return self.operation(operation_type="copy_from", operation_param=operation_param,
operation_dir=operation_dir)


def main():
""" Main program. """
def main():  # pylint: disable=too-many-branches,too-many-statements
"""Execute Main program."""

parser = optparse.OptionParser(description=__doc__)
control_options = optparse.OptionGroup(parser, "Control options")
@ -336,10 +339,10 @@ def main():
user_host=options.user_host, ssh_connection_options=ssh_connection_options,
ssh_options=ssh_options, scp_options=scp_options, retries=options.retries,
retry_sleep=options.retry_sleep, debug=options.debug)
ret_code, buffer = remote_op.operation(options.operation, operation_param, operation_dir)
ret_code, buff = remote_op.operation(options.operation, operation_param, operation_dir)
if options.verbose:
print("Return code: {} for command {}".format(ret_code, sys.argv))
print(buffer)
print(buff)

sys.exit(ret_code)

@ -7,11 +7,11 @@ pyyaml == 3.11
unittest-xml-reporting == 2.1.0
# Linters
yapf == 0.21.0
mypy == 0.501 ; python_version > "3"
mypy == 0.580 ; python_version > "3"
# typing in Python 2 for mypy
typing == 3.6.1; python_version < "3"
pylint == 1.6.5
pydocstyle == 1.1.1
pylint == 1.8.3
pydocstyle == 2.1.1
# resmoke.py
-r resmokelib/requirements.txt
# generate_error_codes.py

@ -1,7 +1,5 @@
#!/usr/bin/env python
"""
Command line utility for executing MongoDB tests of all kinds.
"""
"""Command line utility for executing MongoDB tests of all kinds."""

from __future__ import absolute_import

@ -14,14 +12,13 @@ import time
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from buildscripts import resmokelib
from buildscripts import resmokelib  # pylint: disable=wrong-import-position


def _execute_suite(suite):
"""
Executes the test suite, failing fast if requested.
def _execute_suite(suite):  # pylint: disable=too-many-branches,too-many-return-statements
"""Execute the test suite, failing fast if requested.

Returns true if the execution of the suite was interrupted by the
Return true if the execution of the suite was interrupted by the
user, and false otherwise.
"""

@ -36,7 +33,7 @@ def _execute_suite(suite):
if resmokelib.config.DRY_RUN == "tests":
sb = []
sb.append("Tests that would be run in suite %s:" % suite.get_display_name())
if len(suite.tests) > 0:
if suite.tests:
for test in suite.tests:
sb.append(test)
else:
@ -44,7 +41,7 @@ def _execute_suite(suite):
logger.info("\n".join(sb))
sb = []
sb.append("Tests that would be excluded from suite %s:" % suite.get_display_name())
if len(suite.excluded) > 0:
if suite.excluded:
for test in suite.excluded:
sb.append(test)
else:
@ -56,7 +53,7 @@ def _execute_suite(suite):
suite.return_code = 0
return False

if len(suite.tests) == 0:
if not suite.tests:
logger.info("Skipping %ss, no tests to run", suite.test_kind)

# Set a successful return code on the test suite because we want to output the tests
@ -85,7 +82,7 @@ def _execute_suite(suite):
except IOError:
suite.return_code = 74  # Exit code for IOError on POSIX systems.
return True
except:
except:  # pylint: disable=bare-except
logger.exception("Encountered an error when running %ss of suite %s.", suite.test_kind,
suite.get_display_name())
suite.return_code = 2
@ -93,6 +90,7 @@ def _execute_suite(suite):
finally:
if archive:
archive.exit()
return True


def _log_summary(logger, suites, time_taken):
@ -107,8 +105,7 @@ def _summarize_suite(suite):


def _dump_suite_config(suite, logging_config):
"""
Returns a string that represents the YAML configuration of a suite.
"""Return a string that represents the YAML configuration of a suite.

TODO: include the "options" key in the result
"""
@ -126,9 +123,9 @@ def _dump_suite_config(suite, logging_config):


def find_suites_by_test(suites):
"""
Looks up what other resmoke suites run the tests specified in the suites
parameter. Returns a dict keyed by test name, value is array of suite names.
"""Look up what other resmoke suites run the tests specified in the suites parameter.

Return a dict keyed by test name, value is array of suite names.
"""

memberships = {}
@ -146,14 +143,10 @@ def _list_suites_and_exit(logger, exit_code=0):


class Main(object):
"""
A class for executing potentially multiple resmoke.py test suites.
"""
"""A class for executing potentially multiple resmoke.py test suites."""

def __init__(self):
"""
Initializes the Main instance by parsing the command line arguments.
"""
"""Initialize the Main instance by parsing the command line arguments."""

self.__start_time = time.time()

@ -162,17 +155,13 @@ class Main(object):
self.__args = args

def _get_suites(self):
"""
Returns a list of resmokelib.testing.suite.Suite instances to execute.
"""
"""Return a list of resmokelib.testing.suite.Suite instances to execute."""

return resmokelib.suitesconfig.get_suites(
suite_files=self.__values.suite_files.split(","), test_files=self.__args)

def run(self):
"""
Executes the list of resmokelib.testing.suite.Suite instances returned by _get_suites().
"""
"""Execute the list of resmokelib.testing.suite.Suite instances."""

logging_config = resmokelib.parser.get_logging_config(self.__values)
resmokelib.logging.loggers.configure_loggers(logging_config)

@ -1,3 +1,4 @@
"""Resmokeconfig module."""
from __future__ import absolute_import

from .suites import NAMED_SUITES

@ -1,7 +1,4 @@
"""
Defines a mapping of shortened names for logger configuration files to
their full path.
"""
"""Defines a mapping of shortened names for logger configuration files to their full path."""

from __future__ import absolute_import

@ -10,11 +7,9 @@ import os.path


def _get_named_loggers():
"""
Explores this directory for any YAML configuration files.
"""Explore this directory for any YAML configuration files.

Returns a mapping of basenames without the file extension to their
full path.
Returns a mapping of basenames without the file extension to their full path.
"""

dirname = os.path.dirname(__file__)

@ -1,7 +1,4 @@
"""
Defines a mapping of shortened names for suite configuration files to
their full path.
"""
"""Defines a mapping of shortened names for suite configuration files to their full path."""

from __future__ import absolute_import

@ -10,11 +7,9 @@ import os.path


def _get_named_suites():
"""
Explores this directory for any YAML configuration files.
"""Explore this directory for any YAML configuration files.

Returns a mapping of basenames without the file extension to their
full path.
Returns a mapping of basenames without the file extension to their full path.
"""

dirname = os.path.dirname(__file__)

@ -1,6 +1,4 @@
"""
Configuration options for resmoke.py.
"""
"""Configuration options for resmoke.py."""

from __future__ import absolute_import

@ -113,19 +111,16 @@ _SuiteOptions = collections.namedtuple("_SuiteOptions", [


class SuiteOptions(_SuiteOptions):
"""
A class for representing top-level options to resmoke.py that can also be set at the
suite-level.
"""
"""Represent top-level options to resmoke.py that can also be set at the suite-level."""

INHERIT = object()
ALL_INHERITED = None

@classmethod
def combine(cls, *suite_options_list):
"""
Returns a SuiteOptions instance representing the combination of all SuiteOptions in
'suite_options_list'.
"""Return SuiteOptions instance.

This object represents the combination of all SuiteOptions in 'suite_options_list'.
"""

combined_options = cls.ALL_INHERITED._asdict()
@ -158,8 +153,9 @@ class SuiteOptions(_SuiteOptions):
return cls(**combined_options)

def resolve(self):
"""
Returns a SuiteOptions instance representing the options overridden at the suite-level and
"""Return a SuiteOptions instance.

This represents the options overridden at the suite-level and
the inherited options from the top-level.
"""

@ -183,8 +179,8 @@ class SuiteOptions(_SuiteOptions):
return SuiteOptions(**options)


SuiteOptions.ALL_INHERITED = SuiteOptions(**dict(
zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))
SuiteOptions.ALL_INHERITED = SuiteOptions(  # type: ignore
**dict(zip(SuiteOptions._fields, itertools.repeat(SuiteOptions.INHERIT))))

##
# Variables that are set by the user at the command line or with --options.

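The INHERIT = object() attribute that SuiteOptions relies on is a standard sentinel idiom: a unique object whose identity means "no suite-level value, defer to the top level", and which survives the namedtuple round-trips above. A self-contained sketch of the idea (names are illustrative, not the resmokelib API):

INHERIT = object()

def resolve_option(suite_value, top_level_value):
    # Identity comparison with the sentinel distinguishes "inherit" from
    # legitimate falsy overrides such as 0, "", False, or None.
    return top_level_value if suite_value is INHERIT else suite_value

assert resolve_option(INHERIT, 5) == 5
assert resolve_option(0, 5) == 0  # 0 is a real override, not "inherit"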
@ -1,3 +1,4 @@
"""Resmokelib core module."""
from __future__ import absolute_import

from . import process

@ -1,7 +1,4 @@
"""
Class used to allocate ports for use by various mongod and mongos
processes involved in running the tests.
"""
"""Class used to allocate ports for mongod and mongos processes involved in running the tests."""

from __future__ import absolute_import

@ -14,16 +11,14 @@ from .. import errors


def _check_port(func):
"""
A decorator that verifies the port returned by the wrapped function
is in the valid range.
"""Provide decorator that verifies the port returned by the wrapped function is in range.

Returns the port if it is valid, and raises a PortAllocationError
otherwise.
Returns the port if it is valid, and raises a PortAllocationError otherwise.
"""

@functools.wraps(func)
def wrapper(*args, **kwargs):
"""Provide wrapper function."""
port = func(*args, **kwargs)

if port < 0:
@ -39,8 +34,7 @@ def _check_port(func):


class PortAllocator(object):
"""
This class is responsible for allocating ranges of ports.
"""Class responsible for allocating ranges of ports.

It reserves a range of ports for each job with the first part of
that range used for the fixture started by that job, and the second
@ -62,13 +56,12 @@ class PortAllocator(object):
_NUM_USED_PORTS_LOCK = threading.Lock()

# Used to keep track of how many ports a fixture has allocated.
_NUM_USED_PORTS = collections.defaultdict(int)
_NUM_USED_PORTS = collections.defaultdict(int)  # type: ignore

@classmethod
@_check_port
def next_fixture_port(cls, job_num):
"""
Returns the next port for a fixture to use.
"""Return the next port for a fixture to use.

Raises a PortAllocationError if the fixture has requested more
ports than are reserved per job, or if the next port is not a
@ -91,9 +84,7 @@ class PortAllocator(object):
@classmethod
@_check_port
def min_test_port(cls, job_num):
"""
For the given job, returns the lowest port that is reserved for
use by tests.
"""Return the lowest port that is reserved for use by tests, for specified job.

Raises a PortAllocationError if that port is higher than the
maximum port.
@ -103,9 +94,7 @@ class PortAllocator(object):
@classmethod
@_check_port
def max_test_port(cls, job_num):
"""
For the given job, returns the highest port that is reserved
for use by tests.
"""Return the highest port that is reserved for use by tests, for specified job.

Raises a PortAllocationError if that port is higher than the
maximum port.
@ -115,8 +104,7 @@ class PortAllocator(object):

@classmethod
def reset(cls):
"""
Resets the internal state of the PortAllocator.
"""Reset the internal state of the PortAllocator.

This method is intended to be called each time resmoke.py starts
a new test suite.

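The @_check_port decorator in the hunk above is a validate-on-return wrapper: every decorated classmethod has its result range-checked before callers see it. A minimal self-contained sketch of the same shape, using a plain ValueError in place of resmokelib's PortAllocationError (the allocation scheme shown is illustrative):

import functools

def check_port(func):
    """Validate that the wrapped function returns a port in the usable range."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        port = func(*args, **kwargs)
        if port < 0 or port > 65535:
            raise ValueError("port %d is out of range" % port)
        return port
    return wrapper

@check_port
def fixture_port(job_num):
    return 20000 + job_num * 250  # illustrative per-job allocation scheme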
@ -1,6 +1,7 @@
"""
Helper class to read output of a subprocess. Used to avoid deadlocks
from the pipe buffer filling up and blocking the subprocess while it's
Helper class to read output of a subprocess.

Used to avoid deadlocks from the pipe buffer filling up and blocking the subprocess while it's
being waited on.
"""

@ -9,11 +10,8 @@ from __future__ import absolute_import
import threading


class LoggerPipe(threading.Thread):
"""
Asynchronously reads the output of a subprocess and sends it to a
logger.
"""
class LoggerPipe(threading.Thread):  # pylint: disable=too-many-instance-attributes
"""Asynchronously reads the output of a subprocess and sends it to a logger."""

# The start() and join() methods are not intended to be called directly on the LoggerPipe
# instance. Since we override them for that effect, the super's versions are preserved here.
@ -21,10 +19,7 @@ class LoggerPipe(threading.Thread):
__join = threading.Thread.join

def __init__(self, logger, level, pipe_out):
"""
Initializes the LoggerPipe with the specified logger, logging
level to use, and pipe to read from.
"""
"""Initialize the LoggerPipe with the specified arguments."""

threading.Thread.__init__(self)
# Main thread should not call join() when exiting
@ -43,12 +38,11 @@ class LoggerPipe(threading.Thread):
LoggerPipe.__start(self)

def start(self):
"""Start not implemented."""
raise NotImplementedError("start should not be called directly")

def run(self):
"""
Reads the output from 'pipe_out' and logs each line to 'logger'.
"""
"""Read the output from 'pipe_out' and log each line to 'logger'."""

with self.__lock:
self.__started = True
@ -70,14 +64,17 @@ class LoggerPipe(threading.Thread):
self.__condition.notify_all()

def join(self, timeout=None):
"""Join not implemented."""
raise NotImplementedError("join should not be called directly")

def wait_until_started(self):
"""Wait until started."""
with self.__lock:
while not self.__started:
self.__condition.wait()

def wait_until_finished(self):
"""Wait until finished."""
with self.__lock:
while not self.__finished:
self.__condition.wait()

@ -1,5 +1,4 @@
"""
A more reliable way to create and destroy processes.
"""A more reliable way to create and destroy processes.

Uses job objects when running on Windows to ensure that all created
processes are terminated.
@ -30,12 +29,12 @@ if os.name == "posix" and sys.version_info[0] == 2:
warnings.warn(("Falling back to using the subprocess module because subprocess32 isn't"
" available. When using the subprocess module, a child process may trigger"
" an invalid free(). See SERVER-22219 for more details."), RuntimeWarning)
import subprocess
import subprocess  # type: ignore
else:
import subprocess

from . import pipe
from .. import utils
from . import pipe  # pylint: disable=wrong-import-position
from .. import utils  # pylint: disable=wrong-import-position

# Attempt to avoid race conditions (e.g. hangs caused by a file descriptor being left open) when
# starting subprocesses concurrently from multiple threads by guarding calls to subprocess.Popen()
@ -87,15 +86,12 @@ if sys.platform == "win32":


class Process(object):
"""
Wrapper around subprocess.Popen class.
"""
"""Wrapper around subprocess.Popen class."""

# pylint: disable=protected-access

def __init__(self, logger, args, env=None, env_vars=None):
"""
Initializes the process with the specified logger, arguments,
and environment.
"""
"""Initialize the process with the specified logger, arguments, and environment."""

# Ensure that executable files that don't already have an
# extension on Windows have a ".exe" extension.
@ -115,10 +111,7 @@ class Process(object):
self._stderr_pipe = None

def start(self):
"""
Starts the process and the logger pipes for its stdout and
stderr.
"""
"""Start the process and the logger pipes for its stdout and stderr."""

creation_flags = 0
if sys.platform == "win32" and _JOB_OBJECT is not None:
@ -158,12 +151,12 @@ class Process(object):
if return_code == win32con.STILL_ACTIVE:
raise

def stop(self, kill=False):
def stop(self, kill=False):  # pylint: disable=too-many-branches
"""Terminate the process."""
if sys.platform == "win32":

# Attempt to cleanly shutdown mongod.
if not kill and len(self.args) > 0 and self.args[0].find("mongod") != -1:
if not kill and self.args and self.args[0].find("mongod") != -1:
mongo_signal_handle = None
try:
mongo_signal_handle = win32event.OpenEvent(
@ -218,13 +211,11 @@ class Process(object):
raise

def poll(self):
"""Poll."""
return self._process.poll()

def wait(self):
"""
Waits until the process has terminated and all output has been
consumed by the logger pipes.
"""
"""Wait until process has terminated and all output has been consumed by the logger pipes."""

return_code = self._process.wait()

@ -236,9 +227,7 @@ class Process(object):
return return_code

def as_command(self):
"""
Returns an equivalent command line invocation of the process.
"""
"""Return an equivalent command line invocation of the process."""

default_env = os.environ
env_diff = self.env.copy()

@ -1,5 +1,4 @@
"""
Utility functions to create MongoDB processes.
"""Utility functions to create MongoDB processes.

Handles all the nitty-gritty parameter conversion.
"""
@ -16,11 +15,9 @@ from .. import config
from .. import utils


def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):
"""
Returns a Process instance that starts a mongod executable with
arguments constructed from 'kwargs'.
"""
def mongod_program(  # pylint: disable=too-many-branches
logger, executable=None, process_kwargs=None, **kwargs):
"""Return a Process instance that starts a mongod with arguments constructed from 'kwargs'."""

executable = utils.default_if_none(executable, config.DEFAULT_MONGOD_EXECUTABLE)
args = [executable]
@ -117,10 +114,7 @@ def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):


def mongos_program(logger, executable=None, process_kwargs=None, **kwargs):
"""
Returns a Process instance that starts a mongos executable with
arguments constructed from 'kwargs'.
"""
"""Return a Process instance that starts a mongos with arguments constructed from 'kwargs'."""

executable = utils.default_if_none(executable, config.DEFAULT_MONGOS_EXECUTABLE)
args = [executable]
@ -143,11 +137,12 @@ def mongos_program(logger, executable=None, process_kwargs=None, **kwargs):
return _process.Process(logger, args, **process_kwargs)


def mongo_shell_program(logger, executable=None, connection_string=None, filename=None,
process_kwargs=None, **kwargs):
"""
Returns a Process instance that starts a mongo shell with the given connection string and
arguments constructed from 'kwargs'.
def mongo_shell_program(  # pylint: disable=too-many-branches,too-many-locals,too-many-statements
logger, executable=None, connection_string=None, filename=None, process_kwargs=None,
**kwargs):
"""Return a Process instance that starts a mongo shell.

The shell is started with the given connection string and arguments constructed from 'kwargs'.
"""
connection_string = utils.default_if_none(config.SHELL_CONN_STRING, connection_string)

@ -256,8 +251,7 @@ def mongo_shell_program(logger, executable=None, connection_string=None, filenam


def _format_shell_vars(sb, path, value):
"""
Formats 'value' in a way that can be passed to --eval.
"""Format 'value' in a way that can be passed to --eval.

If 'value' is a dictionary, then it is unrolled into the creation of
a new JSON object with properties assigned for each key of the
@ -277,10 +271,7 @@ def _format_shell_vars(sb, path, value):


def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **kwargs):
"""
Returns a Process instance that starts a dbtest executable with
arguments constructed from 'kwargs'.
"""
"""Return a Process instance that starts a dbtest with arguments constructed from 'kwargs'."""

executable = utils.default_if_none(executable, config.DEFAULT_DBTEST_EXECUTABLE)
args = [executable]
@ -295,10 +286,11 @@ def dbtest_program(logger, executable=None, suites=None, process_kwargs=None, **


def generic_program(logger, args, process_kwargs=None, **kwargs):
"""
Returns a Process instance that starts an arbitrary executable with
arguments constructed from 'kwargs'. The args parameter is an array
of strings containing the command to execute.
"""Return a Process instance that starts an arbitrary executable.

The executable arguments are constructed from 'kwargs'.

The args parameter is an array of strings containing the command to execute.
"""

if not utils.is_string_list(args):
@ -311,9 +303,9 @@ def generic_program(logger, args, process_kwargs=None, **kwargs):


def _format_test_data_set_parameters(set_parameters):
"""
Converts key-value pairs from 'set_parameters' into the comma
delimited list format expected by the parser in servers.js.
"""Convert key-value pairs from 'set_parameters' into a comma delimited list format.

The format is used by the parser in servers.js.

WARNING: the parsing logic in servers.js is very primitive.
Non-scalar options such as logComponentVerbosity will not work
@ -332,9 +324,9 @@ def _format_test_data_set_parameters(set_parameters):

def _apply_set_parameters(args, set_parameter):
|
||||
"""
|
||||
Converts key-value pairs from 'kwargs' into --setParameter key=value
|
||||
arguments to an executable and appends them to 'args'.
|
||||
"""Convert key-value pairs from 'kwargs' into --setParameter key=value arguments.
|
||||
|
||||
This result is appended to 'args'.
|
||||
"""
|
||||
|
||||
for param_name in set_parameter:
|
||||
@ -347,10 +339,9 @@ def _apply_set_parameters(args, set_parameter):
|
||||
|
||||
|
||||
def _apply_kwargs(args, kwargs):
|
||||
"""
|
||||
Converts key-value pairs from 'kwargs' into --key value arguments
|
||||
to an executable and appends them to 'args'.
|
||||
"""Convert key-value pairs from 'kwargs' into --key value arguments.
|
||||
|
||||
This result is appended to 'args'.
|
||||
A --flag without a value is represented with the empty string.
|
||||
"""
|
||||
|
||||
@ -363,9 +354,7 @@ def _apply_kwargs(args, kwargs):
|
||||
|
||||
|
||||
def _set_keyfile_permissions(opts):
|
||||
"""
|
||||
Change the permissions of keyfiles in 'opts' to 600, i.e. only the
|
||||
user can read and write the file.
|
||||
"""Change the permissions of keyfiles in 'opts' to 600, (only user can read and write the file).
|
||||
|
||||
This necessary to avoid having the mongod/mongos fail to start up
|
||||
because "permissions on the keyfiles are too open".
|
||||
|
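The two conversions above behave roughly like this simplified sketch; argument handling in the real helpers may differ in detail:

    def _apply_set_parameters(args, set_parameter):
        # Each entry becomes a separate "--setParameter name=value" pair.
        for param_name in set_parameter:
            args.append("--setParameter")
            args.append("%s=%s" % (param_name, set_parameter[param_name]))

    def _apply_kwargs(args, kwargs):
        # Each keyword becomes "--key value"; an empty string marks a bare flag.
        for arg_name in kwargs:
            arg_value = str(kwargs[arg_name])
            args.append("--%s" % arg_name)
            if arg_value:
                args.append(arg_value)

Under these assumptions, mongod_program(logger, port=20000, set_parameters={"enableTestCommands": 1}) would assemble something like ["mongod", "--setParameter", "enableTestCommands=1", "--port", "20000"].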
@ -1,59 +1,47 @@
"""
Exceptions raised by resmoke.py.
"""
"""Exceptions raised by resmoke.py."""


class ResmokeError(Exception):
"""
Base class for all resmoke.py exceptions.
"""
class ResmokeError(Exception):  # noqa: D204
"""Base class for all resmoke.py exceptions."""
pass


class SuiteNotFound(ResmokeError):
"""
A suite that isn't recognized was specified.
"""
class SuiteNotFound(ResmokeError):  # noqa: D204
"""A suite that isn't recognized was specified."""
pass


class StopExecution(ResmokeError):
"""
Exception that is raised when resmoke.py should stop executing tests
if failing fast is enabled.
"""
class StopExecution(ResmokeError):  # noqa: D204
"""Exception raised when resmoke.py should stop executing tests if failing fast is enabled."""
pass


class UserInterrupt(StopExecution):
"""
Exception that is raised when a user signals resmoke.py to
unconditionally stop executing tests.
"""
class UserInterrupt(StopExecution):  # noqa: D204
"""Exception raised when a user signals resmoke.py to unconditionally stop executing tests."""
pass


class TestFailure(ResmokeError):
"""
Exception that is raised by a hook in the after_test method if it
determines the the previous test should be marked as a failure.
"""
class TestFailure(ResmokeError):  # noqa: D204
"""Exception raised by a hook in the after_test method.

Raised if it determines the previous test should be marked as a failure.
"""
pass


class ServerFailure(TestFailure):
"""
Exception that is raised by a hook in the after_test method if it
detects that the fixture did not exit cleanly and should be marked
as a failure.
"""
class ServerFailure(TestFailure):  # noqa: D204
"""Exception raised by a hook in the after_test method.

Raised if it detects that the fixture did not exit cleanly and should be marked
as a failure.
"""
pass


class PortAllocationError(ResmokeError):
"""
Exception that is raised by the PortAllocator if a port is requested
outside of the range of valid ports, or if a fixture requests more
ports than were reserved for that job.
"""
class PortAllocationError(ResmokeError):  # noqa: D204
"""Exception that is raised by the PortAllocator.

Raised if a port is requested outside of the range of valid ports, or if a
fixture requests more ports than were reserved for that job.
"""
pass
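As a hypothetical illustration of how this hierarchy is consumed (not part of the commit), a top-level loop can distinguish "stop everything" from an ordinary failure; run_tests() here is an assumed stand-in for the executor loop:

    import sys

    from buildscripts.resmokelib import errors

    try:
        run_tests()  # assumed stand-in for the executor loop
    except errors.UserInterrupt:
        sys.exit(130)  # the user asked to stop unconditionally
    except errors.StopExecution:
        sys.exit(2)  # failing fast was enabled and a test failed
    except errors.TestFailure:
        sys.exit(1)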
@ -1,6 +1,4 @@
"""
Extension to the logging package to support buildlogger.
"""
"""Extension to the logging package to support buildlogger."""

from __future__ import absolute_import

@ -1,6 +1,4 @@
"""
Defines handlers for communicating with a buildlogger server.
"""
"""Define handlers for communicating with a buildlogger server."""

from __future__ import absolute_import

@ -27,19 +25,17 @@ BUILDLOGGER_FALLBACK = None


def _log_on_error(func):
"""
A decorator that causes any exceptions to be logged by the
"buildlogger" Logger instance.
"""Provide decorator that causes exceptions to be logged by the "buildlogger" Logger instance.

Returns the wrapped function's return value, or None if an error
was encountered.
Return the wrapped function's return value, or None if an error was encountered.
"""

@functools.wraps(func)
def wrapper(*args, **kwargs):
"""Provide wrapper function."""
try:
return func(*args, **kwargs)
except:
except:  # pylint: disable=bare-except
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return None

@ -50,9 +46,8 @@ class _LogsSplitter(object):
"""Class with static methods used to split list of log lines into smaller batches."""

@staticmethod
def split_logs(log_lines, max_size):
"""
Splits the log lines into batches of size less than or equal to max_size.
def split_logs(log_lines, max_size):  # noqa: D406,D407,D411,D413
"""Split the log lines into batches of size less than or equal to max_size.

Args:
log_lines: A list of log lines.
@ -65,8 +60,8 @@ class _LogsSplitter(object):
return [log_lines]

def line_size(line):
"""
Computes the encoded JSON size of a log line as part of an array.
"""Compute the encoded JSON size of a log line as part of an array.

2 is added to each string size to account for the array representation of the logs,
as each line is preceded by a '[' or a space and followed by a ',' or a ']'.
"""
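A minimal sketch of the batching rule documented here, assuming max_size is a byte count (the real method also returns the input unchanged when max_size is None):

    def split_logs(log_lines, max_size):
        def line_size(line):
            # +2 accounts for the surrounding '[ ', ', ' and ' ]' characters in
            # the JSON array encoding of the batch.
            return len(line) + 2

        batches, current, current_size = [], [], 0
        for line in log_lines:
            size = line_size(line)
            if current and current_size + size > max_size:
                batches.append(current)
                current, current_size = [], 0
            current.append(line)
            current_size += size
        if current:
            batches.append(current)
        return batches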
@ -88,17 +83,11 @@ class _LogsSplitter(object):


class _BaseBuildloggerHandler(handlers.BufferedHandler):
"""
Base class of the buildlogger handler for the global logs and the
handler for the test logs.
"""
"""Base class of the buildlogger handler for global logs and handler for test logs."""

def __init__(self, build_config, endpoint, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
"""
Initializes the buildlogger handler with the build id and
credentials.
"""
"""Initialize the buildlogger handler with the build id and credentials."""

handlers.BufferedHandler.__init__(self, capacity, interval_secs)

@ -111,9 +100,9 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
self.max_size = None

def process_record(self, record):
"""
Returns a tuple of the time the log record was created, and the
message because the buildlogger expects the log messages
"""Return a tuple of the time the log record was created, and the message.

This is necessary because the buildlogger expects the log messages to be
formatted in JSON as:

[ [ <log-time-1>, <log-message-1> ],
@ -124,14 +113,11 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
return (record.created, msg)
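A sketch of the wire format this docstring describes: buffered records are (created, message) tuples that serialize directly into a JSON array of pairs (values below are made up):

    import json
    import time

    buffered = [(time.time(), "starting fixture"), (time.time(), "fixture ready")]
    payload = json.dumps(buffered)
    # e.g. '[[<t1>, "starting fixture"], [<t2>, "fixture ready"]]'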
def post(self, *args, **kwargs):
"""
Convenience method for subclasses to use when making POST requests.
"""
"""Provide convenience method for subclasses to use when making POST requests."""
return self.http_handler.post(*args, **kwargs)

def _append_logs(self, log_lines):
"""
Sends a POST request to the handlers endpoint with the logs that have been captured.
def _append_logs(self, log_lines):  # noqa: D406,D407,D413
"""Send a POST request to the handlers endpoint with the logs that have been captured.

Returns:
The number of log lines that have been successfully sent.
@ -145,10 +131,8 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
break
return lines_sent

def __append_logs_chunk(self, log_lines_chunk):
"""
Sends a log lines chunk, handles 413 Request Entity Too Large errors and retries
if necessary.
def __append_logs_chunk(self, log_lines_chunk):  # noqa: D406,D407,D413
"""Send a log lines chunk, handle 413 Request Entity Too Large errors and retry if necessary.

Returns:
The number of log lines that have been successfully sent.
@ -173,14 +157,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):
return self._append_logs(log_lines_chunk)
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return 0
except:
except:  # pylint: disable=bare-except
BUILDLOGGER_FALLBACK.exception("Encountered an error.")
return 0
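The rough shape of that retry loop, as a self-contained sketch. send() and EntityTooLarge are assumed stand-ins for the real POST call and the HTTP 413 error it can surface, and split_logs() is the batching sketch shown earlier; the real method instead tracks self.max_size and re-splits via _LogsSplitter:

    class EntityTooLarge(Exception):
        """Assumed stand-in for an HTTP 413 response."""

    def append_logs_chunk(chunk, send, max_size):
        try:
            send(chunk)
            return len(chunk)
        except EntityTooLarge:
            new_max_size = max_size // 2  # shrink the batch and try again
            if new_max_size == 0:
                return 0  # give up: even a single line is too large
            sent = 0
            for smaller in split_logs(chunk, new_max_size):
                sent += append_logs_chunk(smaller, send, new_max_size)
            return sent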
def _flush_buffer_with_lock(self, buf, close_called):
"""
Ensures all logging output has been flushed to the buildlogger
server.
"""Ensure all logging output has been flushed to the buildlogger server.

If _append_logs() returns false, then the log messages are added
to a separate buffer and retried the next time flush() is
@ -205,13 +187,12 @@ class _BaseBuildloggerHandler(handlers.BufferedHandler):


class BuildloggerTestHandler(_BaseBuildloggerHandler):
"""
Buildlogger handler for the test logs.
"""
"""Buildlogger handler for the test logs."""

def __init__(self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
"""Initializes the buildlogger handler with the credentials, build id, and test id."""
def __init__(  # pylint: disable=too-many-arguments
self, build_config, build_id, test_id, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
"""Initialize the buildlogger handler with the credentials, build id, and test id."""
endpoint = APPEND_TEST_LOGS_ENDPOINT % {
"build_id": build_id,
"test_id": test_id,
@ -220,19 +201,14 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):

@_log_on_error
def _finish_test(self, failed=False):
"""
Sends a POST request to the APPEND_TEST_LOGS_ENDPOINT with the
test status.
"""
"""Send a POST request to the APPEND_TEST_LOGS_ENDPOINT with the test status."""
self.post(self.endpoint, headers={
"X-Sendlogs-Test-Done": "true",
"X-Sendlogs-Test-Failed": "true" if failed else "false",
})

def close(self):
"""
Closes the buildlogger handler.
"""
"""Close the buildlogger handler."""

_BaseBuildloggerHandler.close(self)

@ -241,13 +217,11 @@ class BuildloggerTestHandler(_BaseBuildloggerHandler):


class BuildloggerGlobalHandler(_BaseBuildloggerHandler):
"""
Buildlogger handler for the global logs.
"""
"""Buildlogger handler for the global logs."""

def __init__(self, build_config, build_id, capacity=_SEND_AFTER_LINES,
interval_secs=_SEND_AFTER_SECS):
"""Initializes the buildlogger handler with the credentials and build id."""
"""Initialize the buildlogger handler with the credentials and build id."""
endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
_BaseBuildloggerHandler.__init__(self, build_config, endpoint, capacity, interval_secs)

@ -261,6 +235,7 @@ class BuildloggerServer(object):

@_log_on_error
def __init__(self):
"""Initialize BuildloggerServer."""
tmp_globals = {}
self.config = {}
execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config)
@ -277,9 +252,7 @@ class BuildloggerServer(object):

@_log_on_error
def new_build_id(self, suffix):
"""
Returns a new build id for sending global logs to.
"""
"""Return a new build id for sending global logs to."""
username = self.config["username"]
password = self.config["password"]
builder = "%s_%s" % (self.config["builder"], suffix)
@ -298,9 +271,7 @@ class BuildloggerServer(object):

@_log_on_error
def new_test_id(self, build_id, test_filename, test_command):
"""
Returns a new test id for sending test logs to.
"""
"""Return a new test id for sending test logs to."""
handler = handlers.HTTPHandler(url_root=_config.BUILDLOGGER_URL,
username=self.config["username"],
password=self.config["password"])
@ -317,19 +288,23 @@ class BuildloggerServer(object):
return response["id"]

def get_global_handler(self, build_id, handler_info):
"""Return the global handler."""
return BuildloggerGlobalHandler(self.config, build_id, **handler_info)

def get_test_handler(self, build_id, test_id, handler_info):
"""Return the test handler."""
return BuildloggerTestHandler(self.config, build_id, test_id, **handler_info)

@staticmethod
def get_build_log_url(build_id):
"""Return the build log URL."""
base_url = _config.BUILDLOGGER_URL.rstrip("/")
endpoint = APPEND_GLOBAL_LOGS_ENDPOINT % {"build_id": build_id}
return "%s/%s" % (base_url, endpoint.strip("/"))

@staticmethod
def get_test_log_url(build_id, test_id):
"""Return the test log URL."""
base_url = _config.BUILDLOGGER_URL.rstrip("/")
endpoint = APPEND_TEST_LOGS_ENDPOINT % {"build_id": build_id, "test_id": test_id}
return "%s/%s" % (base_url, endpoint.strip("/"))
@ -1,6 +1,6 @@
"""
Manages a thread responsible for periodically calling flush() on
logging.Handler instances used to send logs to buildlogger.
"""Manage a thread responsible for periodically calling flush() on logging.Handler instances.

These instances are used to send logs to buildlogger.
"""

from __future__ import absolute_import

@ -16,11 +16,9 @@ _FLUSH_THREAD = None


def start_thread():
"""
Starts the flush thread.
"""
"""Start the flush thread."""

global _FLUSH_THREAD
global _FLUSH_THREAD  # pylint: disable=global-statement
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is not None:
raise ValueError("FlushThread has already been started")
@ -30,9 +28,7 @@ def start_thread():


def stop_thread():
"""
Signals the flush thread to stop and waits until it does.
"""
"""Signal the flush thread to stop and wait until it does."""

with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is None:
@ -44,12 +40,9 @@ def stop_thread():


def flush_after(handler, delay):
"""
Adds 'handler' to the queue so that it is flushed after 'delay'
seconds by the flush thread.
"""Add 'handler' to the queue so that it is flushed after 'delay' seconds by the flush thread.

Returns the scheduled event which may be used for later cancellation
(see cancel()).
Return the scheduled event which may be used for later cancellation (see cancel()).
"""

if not isinstance(handler, logging.Handler):
@ -59,12 +52,9 @@ def flush_after(handler, delay):


def close_later(handler):
"""
Adds 'handler' to the queue so that it is closed later by the flush
thread.
"""Add 'handler' to the queue so that it is closed later by the flush thread.

Returns the scheduled event which may be used for later cancellation
(see cancel()).
Return the scheduled event which may be used for later cancellation (see cancel()).
"""

if not isinstance(handler, logging.Handler):
@ -78,36 +68,27 @@ def close_later(handler):


def cancel(event):
"""
Attempts to cancel the specified event.
"""Attempt to cancel the specified event.

Returns true if the event was successfully canceled, and returns
false otherwise.
Return true if the event was successfully canceled, and false otherwise.
"""
return _FLUSH_THREAD.cancel_event(event)
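A minimal sketch of the scheduling pattern these helpers wrap, using the standard-library sched module; the names are assumptions, not the real module's internals, and a real implementation keeps the worker thread alive between events rather than exiting when the queue drains:

    import sched
    import threading
    import time

    scheduler = sched.scheduler(time.time, time.sleep)

    def flush_after(handler, delay):
        # Schedule handler.flush() to run 'delay' seconds from now; the
        # returned event can later be passed to scheduler.cancel().
        return scheduler.enter(delay, 0, handler.flush, ())

    threading.Thread(target=scheduler.run, daemon=True).start()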

class _FlushThread(threading.Thread):
"""
Asynchronously flushes and closes logging handlers.
"""
"""Asynchronously flush and close logging handlers."""

_TIMEOUT = 24 * 60 * 60  # =1 day (a long time to have tests run)

def __init__(self):
"""
Initializes the flush thread.
"""
"""Initialize the flush thread."""

threading.Thread.__init__(self, name="FlushThread")
# Do not wait to flush the logs if interrupted by the user.
self.daemon = True

def interruptible_sleep(secs):
"""
Waits up to 'secs' seconds or for the
'self.__schedule_updated' event to be set.
"""
"""Wait up to 'secs' seconds or for the 'self.__schedule_updated' event to be set."""

# Setting 'self.__schedule_updated' in submit() will cause the scheduler to return early
# from its 'delayfunc'. This makes it so that if a new event is scheduled with
@ -121,9 +102,7 @@ class _FlushThread(threading.Thread):
self.__terminated = threading.Event()

def run(self):
"""
Continuously flushes and closes logging handlers.
"""
"""Continuously flush and close logging handlers."""

try:
while not (self.__should_stop.is_set() and self.__scheduler.empty()):
@ -146,9 +125,9 @@ class _FlushThread(threading.Thread):
self.__terminated.set()

def signal_shutdown(self):
"""
Indicates to the flush thread that it should exit once its
current queue of logging handlers are flushed and closed.
"""Indicate to the flush thread that it should exit.

This will happen once its current queue of logging handlers is flushed and closed.
"""

self.__should_stop.set()
@ -158,21 +137,16 @@ class _FlushThread(threading.Thread):
self.__schedule_updated.set()

def await_shutdown(self):
"""
Waits for the flush thread to finish processing its current
queue of logging handlers.
"""
"""Wait for the flush thread to finish processing its current queue of logging handlers."""

while not self.__terminated.is_set():
# Need to pass a timeout to wait() so that KeyboardInterrupt exceptions are propagated.
self.__terminated.wait(_FlushThread._TIMEOUT)

def submit(self, action, delay):
"""
Schedules 'action' for 'delay' seconds from now.
"""Schedule 'action' for 'delay' seconds from now.

Returns the scheduled event which may be used for later
cancelation (see cancel_event()).
Return the scheduled event which may be used for later cancellation (see cancel_event()).
"""

event = self.__scheduler.enter(delay, 0, action, ())
@ -180,11 +154,9 @@ class _FlushThread(threading.Thread):
return event

def cancel_event(self, event):
"""
Attempts to cancel the specified event.
"""Attempt to cancel the specified event.

Returns true if the event was successfully canceled, and returns
false otherwise.
Return true if the event was successfully canceled, and false otherwise.
"""

try:
@ -1,22 +1,20 @@
"""
Custom formatters for the logging handlers.
"""
"""Custom formatters for the logging handlers."""

from __future__ import absolute_import

import logging
import sys
import time


class ISO8601Formatter(logging.Formatter):
"""
An ISO 8601 compliant formatter for log messages. It formats the
timezone as an hour/minute offset and uses a period as the
"""An ISO 8601 compliant formatter for log messages.

It formats the timezone as an hour/minute offset and uses a period as the
millisecond separator in order to match the log messages of MongoDB.
"""

def formatTime(self, record, datefmt=None):
"""Return formatted time."""
converted_time = self.converter(record.created)

if datefmt is not None:
@ -28,10 +26,7 @@ class ISO8601Formatter(logging.Formatter):

@staticmethod
def _format_timezone_offset(converted_time):
"""
Returns the timezone as an hour/minute offset in the form
"+HHMM" or "-HHMM".
"""
"""Return the timezone as an hour/minute offset in the form "+HHMM" or "-HHMM"."""

# Windows treats %z in the format string as %Z, so we compute the hour/minute offset
# manually.
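A sketch of the manual offset computation hinted at here; the helper shape is an assumption, but the timegm/mktime round trip is a standard way to recover the local UTC offset for a given local struct_time:

    import calendar
    import time

    def format_timezone_offset(converted_time):
        # timegm() treats the local struct_time as if it were UTC, so the
        # difference against the true epoch value is exactly the UTC offset.
        epoch = time.mktime(converted_time)
        offset_secs = calendar.timegm(converted_time) - calendar.timegm(time.gmtime(epoch))
        sign = "+" if offset_secs >= 0 else "-"
        offset_mins = abs(offset_secs) // 60
        return "%s%02d%02d" % (sign, offset_mins // 60, offset_mins % 60)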
@ -1,7 +1,4 @@
"""
Additional handlers that are used as the base classes of the buildlogger
handler.
"""
"""Additional handlers that are used as the base classes of the buildlogger handler."""

from __future__ import absolute_import

@ -27,17 +24,16 @@ _TIMEOUT_SECS = 10


class BufferedHandler(logging.Handler):
"""
A handler class that buffers logging records in memory. Whenever
each record is added to the buffer, a check is made to see if the
buffer should be flushed. If it should, then flush() is expected to
do what's needed.
"""A handler class that buffers logging records in memory.

Whenever each record is added to the buffer, a check is made to see if the buffer
should be flushed. If it should, then flush() is expected to do what's needed.
"""

def __init__(self, capacity, interval_secs):
"""
Initializes the handler with the buffer size and timeout after
which the buffer is flushed regardless.
"""Initialize the handler with the buffer size and timeout.

These values determine when the buffer is flushed regardless.
"""

logging.Handler.__init__(self)
@ -68,18 +64,19 @@ class BufferedHandler(logging.Handler):
# close() serialize accesses to 'self.__emit_buffer' in a more granular way via
# 'self.__emit_lock'.
def createLock(self):
"""Create lock."""
pass

def acquire(self):
"""Acquire."""
pass

def release(self):
"""Release."""
pass

def process_record(self, record):
"""
Applies a transformation to the record before it gets added to
the buffer.
def process_record(self, record):  # pylint: disable=no-self-use
"""Apply a transformation to the record before it gets added to the buffer.

The default implementation returns 'record' unmodified.
"""
@ -87,8 +84,7 @@ class BufferedHandler(logging.Handler):
return record

def emit(self, record):
"""
Emits a record.
"""Emit a record.

Append the record to the buffer after it has been transformed by
process_record(). If the length of the buffer is greater than or
@ -117,9 +113,7 @@ class BufferedHandler(logging.Handler):
self.__flush_scheduled_by_emit = True
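A condensed sketch of the buffering contract described here, as a hypothetical subclass; the real class also flushes on a timer via the flush thread:

    import logging

    class ListBufferedHandler(logging.Handler):
        def __init__(self, capacity):
            logging.Handler.__init__(self)
            self.capacity = capacity
            self.buffer = []

        def emit(self, record):
            # Buffer the record, then flush once the capacity threshold is hit.
            self.buffer.append(self.format(record))
            if len(self.buffer) >= self.capacity:
                self.flush()

        def flush(self):
            for line in self.buffer:
                print(line)  # stand-in for the POST to the buildlogger server
            self.buffer = []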
def flush(self):
"""
Ensures all logging output has been flushed.
"""
"""Ensure all logging output has been flushed."""

self.__flush(close_called=False)

@ -132,9 +126,7 @@ class BufferedHandler(logging.Handler):
self.__flush_scheduled_by_emit = False

def __flush(self, close_called):
"""
Ensures all logging output has been flushed.
"""
"""Ensure all logging output has been flushed."""

with self.__emit_lock:
buf = self.__emit_buffer
@ -147,18 +139,13 @@ class BufferedHandler(logging.Handler):
self._flush_buffer_with_lock(buf, close_called)

def _flush_buffer_with_lock(self, buf, close_called):
"""
Ensures all logging output has been flushed.
"""
"""Ensure all logging output has been flushed."""

raise NotImplementedError("_flush_buffer_with_lock must be implemented by BufferedHandler"
" subclasses")

def close(self):
"""
Flushes the buffer and tidies up any resources used by this
handler.
"""
"""Flush the buffer and tidy up any resources used by this handler."""

with self.__emit_lock:
if self.__flush_event is not None:
@ -170,15 +157,10 @@ class BufferedHandler(logging.Handler):


class HTTPHandler(object):
"""
A class which sends data to a web server using POST requests.
"""
"""A class which sends data to a web server using POST requests."""

def __init__(self, url_root, username, password):
"""
Initializes the handler with the necessary authentication
credentials.
"""
"""Initialize the handler with the necessary authentication credentials."""

self.auth_handler = requests.auth.HTTPBasicAuth(username, password)

@ -188,11 +170,9 @@ class HTTPHandler(object):
return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/"))

def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS):
"""
Sends a POST request to the specified endpoint with the supplied
data.
"""Send a POST request to the specified endpoint with the supplied data.

Returns the response, either as a string or a JSON object based
Return the response, either as a string or a JSON object based
on the content type.
"""
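A minimal sketch of such a POST helper using the requests library; this is an assumed simplification, not the handler's exact body:

    import requests

    def post(url, data, username, password, timeout_secs=10):
        auth = requests.auth.HTTPBasicAuth(username, password)
        response = requests.post(url, json=data, auth=auth, timeout=timeout_secs)
        response.raise_for_status()
        # Decode by content type, as the docstring above describes.
        if response.headers.get("Content-Type", "").startswith("application/json"):
            return response.json()
        return response.text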
@ -1,6 +1,4 @@
"""
Module to hold the logger instances themselves.
"""
"""Module to hold the logger instances themselves."""

from __future__ import absolute_import

@ -20,17 +18,21 @@ EXECUTOR_LOGGER = None


def _build_logger_server(logging_config):
"""Create and return a new BuildloggerServer if "buildlogger" is configured as
one of the handler class in the configuration, return None otherwise.
"""Create and return a new BuildloggerServer.

This occurs if "buildlogger" is configured as one of the handler classes in the
configuration; otherwise return None.
"""
for logger_name in (FIXTURE_LOGGER_NAME, TESTS_LOGGER_NAME):
logger_info = logging_config[logger_name]
for handler_info in logger_info["handlers"]:
if handler_info["class"] == "buildlogger":
return buildlogger.BuildloggerServer()
return None
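For orientation, the scan above expects a logging_config of roughly this shape; the dict below is an illustrative fabrication (assuming the fixture/tests logger names), not a real configuration from the repository:

    logging_config = {
        "fixture": {"format": "[%(name)s] %(message)s",
                    "handlers": [{"class": "buildlogger"}]},
        "tests": {"handlers": [{"class": "logging.StreamHandler"}]},
    }
    # With this input, _build_logger_server() finds the "buildlogger" handler
    # under "fixture" and constructs a BuildloggerServer.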

def configure_loggers(logging_config):
"""Configure the loggers."""
buildlogger.BUILDLOGGER_FALLBACK = BaseLogger("buildlogger")
# The 'buildlogger' prefix is not added to the fallback logger since the prefix of the original
# logger will be there as part of the logged message.
@ -39,7 +41,7 @@ def configure_loggers(logging_config):
build_logger_server = _build_logger_server(logging_config)
fixture_logger = FixtureRootLogger(logging_config, build_logger_server)
tests_logger = TestsRootLogger(logging_config, build_logger_server)
global EXECUTOR_LOGGER
global EXECUTOR_LOGGER  # pylint: disable=global-statement
EXECUTOR_LOGGER = ExecutorRootLogger(logging_config, build_logger_server, fixture_logger,
tests_logger)

@ -68,7 +70,7 @@ class BaseLogger(logging.Logger):

@property
def build_logger_server(self):
"""The configured BuildloggerServer instance, or None."""
"""Get the configured BuildloggerServer instance, or None."""
if self._build_logger_server:
return self._build_logger_server
elif self.parent:
@ -78,7 +80,7 @@ class BaseLogger(logging.Logger):

@property
def logging_config(self):
"""The logging configuration."""
"""Get the logging configuration."""
if self._logging_config:
return self._logging_config
elif self.parent:
@ -88,6 +90,7 @@ class BaseLogger(logging.Logger):

@staticmethod
def get_formatter(logger_info):
"""Return formatter."""
log_format = logger_info.get("format", _DEFAULT_FORMAT)
return formatters.ISO8601Formatter(fmt=log_format)

@ -107,7 +110,7 @@ class RootLogger(BaseLogger):

def _configure(self):
if self.name not in self.logging_config:
raise ValueError("Logging configuration should contain the %s component", self.name)
raise ValueError("Logging configuration should contain the %s component" % self.name)
logger_info = self.logging_config[self.name]
formatter = self.get_formatter(logger_info)

@ -158,6 +161,8 @@ class ExecutorRootLogger(RootLogger):


class JobLogger(BaseLogger):
"""JobLogger class."""

def __init__(self, test_kind, job_num, parent, fixture_root_logger):
"""Initialize a JobLogger.

@ -200,7 +205,10 @@ class JobLogger(BaseLogger):


class TestLogger(BaseLogger):
def __init__(self, test_name, parent, build_id=None, test_id=None, url=None):
"""TestLogger class."""

def __init__(  # pylint: disable=too-many-arguments
self, test_name, parent, build_id=None, test_id=None, url=None):
"""Initialize a TestLogger.

:param test_name: the test name.
@ -245,6 +253,8 @@ class FixtureRootLogger(RootLogger):


class FixtureLogger(BaseLogger):
"""FixtureLogger class."""

def __init__(self, fixture_class, job_num, build_id, fixture_root_logger):
"""Initialize a FixtureLogger.

@ -277,6 +287,8 @@ class FixtureLogger(BaseLogger):


class FixtureNodeLogger(BaseLogger):
"""FixtureNodeLogger class."""

def __init__(self, fixture_class, job_num, node_name, fixture_logger):
"""Initialize a FixtureNodeLogger.

@ -310,6 +322,8 @@ class TestsRootLogger(RootLogger):


class TestQueueLogger(BaseLogger):
"""TestQueueLogger class."""

def __init__(self, test_kind, tests_root_logger):
"""Initialize a TestQueueLogger.

@ -320,6 +334,8 @@ class TestQueueLogger(BaseLogger):


class HookLogger(BaseLogger):
"""HookLogger class."""

def __init__(self, hook_class, fixture_logger, tests_root_logger):
"""Initialize a HookLogger.

@ -337,9 +353,7 @@ class HookLogger(BaseLogger):


def _fallback_buildlogger_handler(include_logger_name=True):
"""
Returns a handler that writes to stderr.
"""
"""Return a handler that writes to stderr."""
if include_logger_name:
log_format = "[fallback] [%(name)s] %(message)s"
else:
@ -353,10 +367,7 @@ def _fallback_buildlogger_handler(include_logger_name=True):


def _get_buildlogger_handler_info(logger_info):
"""
Returns the buildlogger handler information if it exists, and None
otherwise.
"""
"""Return the buildlogger handler information if it exists, and None otherwise."""
for handler_info in logger_info["handlers"]:
handler_info = handler_info.copy()
if handler_info.pop("class") == "buildlogger":
@ -1,6 +1,4 @@
"""
Parser for command line arguments.
"""
"""Parser for command line arguments."""

from __future__ import absolute_import

@ -15,10 +13,8 @@ from . import utils
from .. import resmokeconfig


def parse_command_line():
"""
Parses the command line arguments passed to resmoke.py.
"""
def parse_command_line():  # pylint: disable=too-many-statements
"""Parse the command line arguments passed to resmoke.py."""

parser = optparse.OptionParser()

@ -304,9 +300,7 @@ def parse_command_line():


def validate_options(parser, options, args):
"""
Do preliminary validation on the options and error on any invalid options.
"""
"""Do preliminary validation on the options and error on any invalid options."""

if options.shell_port is not None and options.shell_conn_string is not None:
parser.error("Cannot specify both `shellPort` and `shellConnString`")
@ -318,9 +312,7 @@ def validate_options(parser, options, args):


def validate_benchmark_options():
"""
Some options are incompatible with benchmark test suites, we error out early if any of
these options are specified.
"""Error out early if any options are incompatible with benchmark test suites.

:return: None
"""
@ -338,10 +330,12 @@ def validate_benchmark_options():


def get_logging_config(values):
"""Return logging config values."""
return _get_logging_config(values.logger_file)


def update_config_vars(values):
def update_config_vars(values):  # pylint: disable=too-many-statements
"""Update config vars."""
config = _config.DEFAULTS.copy()

# Override `config` with values from command line arguments.
@ -436,10 +430,7 @@ def update_config_vars(values):


def _get_logging_config(pathname):
"""
Attempts to read a YAML configuration from 'pathname' that describes
how resmoke.py should log the tests and fixtures.
"""
"""Read the YAML configuration from 'pathname' that describes how to log tests and fixtures."""

# Named loggers are specified as the basename of the file, without the .yml extension.
if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname):
@ -454,17 +445,14 @@ def _get_logging_config(pathname):


def _expand_user(pathname):
"""
Wrapper around os.path.expanduser() to do nothing when given None.
"""
"""Provide wrapper around os.path.expanduser() to do nothing when given None."""
if pathname is None:
return None
return os.path.expanduser(pathname)


def _tags_from_list(tags_list):
"""
Returns the list of tags from a list of tag parameter values.
"""Return the list of tags from a list of tag parameter values.

Each parameter value in the list may be a list of comma separated tags, with empty strings
ignored.
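A sketch of the flattening that docstring describes (the behavior is documented; the body below is an illustrative reconstruction):

    def _tags_from_list(tags_list):
        if tags_list is None:
            return None
        tags = []
        for item in tags_list:
            # Each value may itself be a comma-separated list; blanks dropped.
            tags.extend(tag for tag in item.split(",") if tag != "")
        return tags

    # _tags_from_list(["a,b", "", "c"]) -> ["a", "b", "c"]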
@ -1,6 +1,4 @@
"""
Manages interactions with the report.json file.
"""
"""Manage interactions with the report.json file."""

from __future__ import absolute_import

@ -11,10 +9,7 @@ from .testing import report as _report


def write(suites):
"""
Writes the combined report of all executions if --reportFile was
specified on the command line.
"""
"""Write the combined report of all executions if --reportFile was specified."""

if config.REPORT_FILE is None:
return
@ -1,5 +1,4 @@
"""
Test selection utility.
"""Test selection utility.

Defines filtering rules for what tests to include in a suite depending
on whether they apply to C++ unit tests, dbtests, or JS tests.
@ -32,15 +31,17 @@ class TestFileExplorer(object):
The file related code has been confined to this class for testability.
"""

def is_glob_pattern(self, path):
"""Indicates if the provided path is a glob pattern.
@staticmethod
def is_glob_pattern(path):
"""Indicate if the provided path is a glob pattern.

See buildscripts.resmokelib.utils.globstar.is_glob_pattern().
"""
return globstar.is_glob_pattern(path)

def iglob(self, pattern):
"""Expands the given glob pattern with regard to the current working directory.
@staticmethod
def iglob(pattern):  # noqa: D406,D407,D411,D413
"""Expand the given glob pattern with regard to the current working directory.

See buildscripts.resmokelib.utils.globstar.iglob().
Returns:
@ -48,8 +49,9 @@ class TestFileExplorer(object):
"""
return globstar.iglob(pattern)

def jstest_tags(self, file_path):
"""Extracts the tags from a JavaScript test file.
@staticmethod
def jstest_tags(file_path):  # noqa: D406,D407,D411,D413
"""Extract the tags from a JavaScript test file.

See buildscripts.resmokelib.utils.jscomment.get_tags().
Returns:
@ -57,8 +59,9 @@ class TestFileExplorer(object):
"""
return jscomment.get_tags(file_path)

def read_root_file(self, root_file_path):
"""Reads a file containing the list of root test files.
@staticmethod
def read_root_file(root_file_path):  # noqa: D406,D407,D411,D413
"""Read a file containing the list of root test files.

Args:
root_file_path: the path to a file containing the path of each test on a separate line.
@ -72,19 +75,21 @@ class TestFileExplorer(object):
tests.append(test_path)
return tests

def fnmatchcase(self, name, pattern):
"""Indicates if the given name matches the given pattern.
@staticmethod
def fnmatchcase(name, pattern):
"""Indicate if the given name matches the given pattern.

See buildscripts.resmokelib.utils.fnmatch.fnmatchcase().
"""
return fnmatch.fnmatchcase(name, pattern)

def isfile(self, path):
"""Indicates if the given path corresponds to an existing file."""
@staticmethod
def isfile(path):
"""Indicate if the given path corresponds to an existing file."""
return os.path.isfile(path)

def list_dbtests(self, dbtest_binary):
"""Lists the available dbtests suites."""
"""List the available dbtest suites."""
returncode, stdout = self._run_program(dbtest_binary, ["--list"])

if returncode != 0:
@ -92,8 +97,9 @@ class TestFileExplorer(object):

return stdout.splitlines()

def _run_program(self, binary, args):
"""Runs a program.
@staticmethod
def _run_program(binary, args):  # noqa: D406,D407,D411,D413
"""Run a program.

Args:
binary: the binary to run.
@ -108,9 +114,11 @@ class TestFileExplorer(object):

return program.returncode, stdout

def parse_tag_file(self, test_kind):
"""
Parses the tag file and return a dict of tagged tests, with the key the filename and the
@staticmethod
def parse_tag_file(test_kind):
"""Parse the tag file and return a dict of tagged tests.

The resulting dict will have as a key the filename and the
value a list of tags, i.e., {'file1.js': ['tag1', 'tag2'], 'file2.js': ['tag2', 'tag3']}.
"""
tagged_tests = collections.defaultdict(list)
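To illustrate how the mapping parse_tag_file() returns gets consumed, here is a small made-up example that unions the file-level tags with the tag-file tags for one test, mirroring the _JSTestSelector.get_tags logic shown further down:

    tagged_tests = {"jstests/core/example1.js": ["tag1", "tag2"]}
    file_tags = ["tag2", "tag3"]  # e.g. from jstest_tags()
    all_tags = sorted(set(file_tags) | set(tagged_tests["jstests/core/example1.js"]))
    # -> ['tag1', 'tag2', 'tag3']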
@ -141,7 +149,7 @@ class _TestList(object):
"""

def __init__(self, test_file_explorer, roots, tests_are_files=True):
"""Initializes the _TestList with a TestFileExplorer component and a list of root tests."""
"""Initialize the _TestList with a TestFileExplorer component and a list of root tests."""
self._test_file_explorer = test_file_explorer
self._tests_are_files = tests_are_files
self._roots = self._expand_files(roots) if tests_are_files else roots
@ -159,12 +167,12 @@ class _TestList(object):
return expanded_tests

def include_files(self, include_files, force=False):
"""Filters the test list so that it only includes files matching 'include_files'.
"""Filter the test list so that it only includes files matching 'include_files'.

Args:
include_files: a list of paths or glob patterns that match the files to include.
force: if True include the matching files that were previously excluded, otherwise
only include files that match and were not previously excluded from this _TestList.
Args:
include_files: a list of paths or glob patterns that match the files to include.
force: if True include the matching files that were previously excluded, otherwise only
include files that match and were not previously excluded from this _TestList.
"""
if not self._tests_are_files:
raise TypeError("_TestList does not contain files.")
@ -178,8 +186,8 @@ class _TestList(object):
if force:
self._filtered |= set(self._roots) & expanded_include_files

def exclude_files(self, exclude_files):
"""Excludes from the test list the files that match elements from 'exclude_files'.
def exclude_files(self, exclude_files):  # noqa: D406,D407,D411,D413
"""Exclude from the test list the files that match elements from 'exclude_files'.

Args:
exclude_files: a list of paths or glob patterns that match the files to exclude.
@ -201,7 +209,7 @@ class _TestList(object):
self._filtered.discard(path)

def match_tag_expression(self, tag_expression, get_tags):
"""Filters the test list to only include tests that match the tag expression.
"""Filter the test list to only include tests that match the tag expression.

Args:
tag_expression: a callable object that takes a list of tags and indicate if the required
@ -212,11 +220,10 @@ class _TestList(object):
self._filtered = {test for test in self._filtered if tag_expression(get_tags(test))}

def include_any_pattern(self, patterns):
"""
Filters the test list to only include tests that match any of the given glob patterns.
"""
"""Filter the test list to only include tests that match any provided glob patterns."""

def match(test):
"""Return True if 'test' matches a pattern."""
for pattern in patterns:
if test == pattern or fnmatch.fnmatchcase(test, pattern):
return True
@ -225,8 +232,7 @@ class _TestList(object):
self._filtered = {test for test in self._filtered if match(test)}

def get_tests(self):
"""
Returns the test list as a list(str).
"""Return the test list as a list(str).

The tests are returned in the same order as they are found in the root tests.
"""
@ -287,7 +293,7 @@ class _MatchExpression(object):


def make_expression(conf):
"""Creates a tag matching expression from an expression configuration.
"""Create a tag matching expression from an expression configuration.

The syntax for the expression configuration is:
- expr: str_expr | dict_expr
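A toy evaluator for the kind of configuration make_expression() accepts; the operator names $allOf/$anyOf/$not are assumptions for illustration, not confirmed against the real grammar:

    def evaluate(conf, tags):
        if isinstance(conf, str):
            return conf in tags  # a bare string matches a single tag
        (op, args), = conf.items()  # a dict_expr holds exactly one operator
        if op == "$allOf":
            return all(evaluate(sub, tags) for sub in args)
        if op == "$anyOf":
            return any(evaluate(sub, tags) for sub in args)
        if op == "$not":
            return not evaluate(args, tags)
        raise ValueError("unknown operator: %s" % op)

    # evaluate({"$allOf": ["requires_journaling", {"$not": "slow"}]},
    #          ["requires_journaling"]) -> True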
@ -325,11 +331,10 @@ def _make_expression_list(configs):
class _SelectorConfig(object):
"""Base object to represent the configuration for test selection."""

def __init__(self, root=None, roots=None, include_files=None, exclude_files=None,
include_tags=None, exclude_tags=None, include_with_any_tags=None,
exclude_with_any_tags=None):
"""
Initializes the _SelectorConfig from the configuration elements.
def __init__(  # pylint: disable=too-many-arguments
self, root=None, roots=None, include_files=None, exclude_files=None, include_tags=None,
exclude_tags=None, include_with_any_tags=None, exclude_with_any_tags=None):
"""Initialize the _SelectorConfig from the configuration elements.

Args:
root: the path to a file containing the list of root tests. Incompatible with 'roots'.
@ -367,10 +372,8 @@ class _SelectorConfig(object):
return set(list_b)
elif list_b is None:
return set(list_a)
else:
return set(list_a) | set(list_b)
else:
return None
return set(list_a) | set(list_b)
return None

@staticmethod
def __make_tags_expression(include_tags, exclude_tags, include_with_any_tags,
@ -389,16 +392,14 @@ class _SelectorConfig(object):

if expressions:
return _AllOfExpression(expressions)
else:
return None
return None


class _Selector(object):
"""Selection algorithm to select tests matching a selector configuration."""

def __init__(self, test_file_explorer, tests_are_files=True):
"""
Initializes the _Selector.
"""Initialize the _Selector.

Args:
test_file_explorer: a TestFileExplorer instance.
@ -406,7 +407,7 @@ class _Selector(object):
self._test_file_explorer = test_file_explorer
self._tests_are_files = tests_are_files

def select(self, selector_config):
def select(self, selector_config):  # noqa: D406,D407,D411,D413
"""Select the test files that match the given configuration.

Args:
@ -434,17 +435,18 @@ class _Selector(object):
test_list.include_files(selector_config.include_files, force=True)
return test_list.get_tests()

def get_tags(self, test_file):
"""Retrieves the tags associated with the give test file."""
@staticmethod
def get_tags(test_file):  # pylint: disable=unused-argument
"""Retrieve the tags associated with the given test file."""
return []


class _JSTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for js_test tests."""

def __init__(self, roots=None, include_files=None, exclude_files=None,
include_with_any_tags=None, exclude_with_any_tags=None, include_tags=None,
exclude_tags=None):
def __init__(  # pylint: disable=too-many-arguments
self, roots=None, include_files=None, exclude_files=None, include_with_any_tags=None,
exclude_with_any_tags=None, include_tags=None, exclude_tags=None):
_SelectorConfig.__init__(self, roots=roots, include_files=include_files,
exclude_files=exclude_files,
include_with_any_tags=include_with_any_tags,
@ -460,6 +462,7 @@ class _JSTestSelector(_Selector):
self._tags = self._test_file_explorer.parse_tag_file("js_test")

def get_tags(self, test_file):
"""Return tags from test_file."""
file_tags = self._test_file_explorer.jstest_tags(test_file)
if test_file in self._tags:
return list(set(file_tags) | set(self._tags[test_file]))
@ -471,6 +474,7 @@ class _CppTestSelectorConfig(_SelectorConfig):

def __init__(self, root=config.DEFAULT_INTEGRATION_TEST_LIST, roots=None, include_files=None,
exclude_files=None):
"""Initialize _CppTestSelectorConfig."""
if roots:
# The 'roots' argument is only present when tests are specified on the command line
# and in that case they take precedence over the tests in the root file.
@ -485,9 +489,11 @@ class _CppTestSelector(_Selector):
"""_Selector subclass for cpp_integration_test and cpp_unit_test tests."""

def __init__(self, test_file_explorer):
"""Initialize _CppTestSelector."""
_Selector.__init__(self, test_file_explorer)

def select(self, selector_config):
"""Return selected tests."""
if selector_config.roots:
# Tests have been specified on the command line. We use them without additional
# filtering.
@ -500,6 +506,7 @@ class _DbTestSelectorConfig(_SelectorConfig):
"""_Selector config subclass for db_test tests."""

def __init__(self, binary=None, roots=None, include_suites=None):
"""Initialize _DbTestSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots)
self.include_suites = utils.default_if_none(include_suites, [])

@ -517,9 +524,11 @@ class _DbTestSelector(_Selector):
"""_Selector subclass for db_test tests."""

def __init__(self, test_file_explorer):
"""Initialize _DbTestSelector."""
_Selector.__init__(self, test_file_explorer, tests_are_files=False)

def select(self, selector_config):
"""Return selected tests."""
if selector_config.roots:
roots = selector_config.roots
else:
@ -550,6 +559,7 @@ class _JsonSchemaTestSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for json_schema_test tests."""

def __init__(self, roots, include_files=None, exclude_files=None):
"""Initialize _JsonSchemaTestSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots, include_files=include_files,
exclude_files=exclude_files)

@ -558,6 +568,7 @@ class _SleepTestCaseSelectorConfig(_SelectorConfig):
"""_SelectorConfig subclass for sleep_test tests."""

def __init__(self, roots):
"""Initialize _SleepTestCaseSelectorConfig."""
_SelectorConfig.__init__(self, roots=roots)


@ -565,6 +576,7 @@ class _SleepTestCaseSelector(_Selector):
"""_Selector subclass for sleep_test tests."""

def __init__(self, test_file_explorer):
"""Initialize _SleepTestCaseSelector."""
_Selector.__init__(self, test_file_explorer, tests_are_files=False)

@ -596,7 +608,7 @@ _SELECTOR_REGISTRY = {


def filter_tests(test_kind, selector_config, test_file_explorer=_DEFAULT_TEST_FILE_EXPLORER):
"""Filters the tests according to a specified configuration.
"""Filter the tests according to a specified configuration.

Args:
test_kind: the test kind, one of 'cpp_integration_test', 'cpp_unit_test', 'db_test',
@ -1,6 +1,4 @@
"""
Utility to support asynchronously signaling the current process.
"""
"""Utility to support asynchronously signaling the current process."""

from __future__ import absolute_import

@ -12,25 +10,23 @@ import threading
import time
import traceback

_is_windows = (sys.platform == "win32")
if _is_windows:
_IS_WINDOWS = (sys.platform == "win32")
if _IS_WINDOWS:
import win32api
import win32event

from . import reportfile
from . import testing
from . import reportfile  # pylint: disable=wrong-import-position
from . import testing  # pylint: disable=wrong-import-position


def register(logger, suites, start_time):
"""
On Windows, set up an event object to wait for signal, otherwise, register a signal handler
for the SIGUSR1 signal.
"""
"""Register an event object to wait for signal, or a signal handler for SIGUSR1."""

def _handle_sigusr1(signum, frame):
"""
Signal handler that will dump the stacks of all threads and
then write out the report file and log suite summaries.
def _handle_sigusr1(signum, frame):  # pylint: disable=unused-argument
"""Signal handler for SIGUSR1.

The handler will dump the stacks of all threads and write out the report file and
log suite summaries.
"""

header_msg = "Dumping stacks due to SIGUSR1 signal"
@ -38,9 +34,10 @@ def register(logger, suites, start_time):
_dump_and_log(header_msg)

def _handle_set_event(event_handle):
"""
Windows event object handler that will dump the stacks of all threads and then write out
the report file and log suite summaries.
"""Event object handler for Windows.

The handler will dump the stacks of all threads and write out the report file and
log suite summaries.
"""

while True:
@ -58,9 +55,7 @@ def register(logger, suites, start_time):
_dump_and_log(header_msg)

def _dump_and_log(header_msg):
"""
Dumps the stacks of all threads, writes the report file, and logs the suite summaries.
"""
"""Dump the stacks of all threads, write report file, and log suite summaries."""
_dump_stacks(logger, header_msg)
reportfile.write(suites)

@ -68,7 +63,7 @@ def register(logger, suites, start_time):

# On Windows spawn a thread to wait on an event object for signal to dump stacks. For Cygwin
# platforms, we use a signal handler since it supports POSIX signals.
if _is_windows:
if _IS_WINDOWS:
# Create unique event_name.
event_name = "Global\\Mongo_Python_" + str(os.getpid())

@ -97,14 +92,12 @@ def register(logger, suites, start_time):


def _dump_stacks(logger, header_msg):
"""
Signal handler that will dump the stacks of all threads.
"""
"""Signal handler that will dump the stacks of all threads."""

sb = []
sb.append(header_msg)

frames = sys._current_frames()
frames = sys._current_frames()  # pylint: disable=protected-access
sb.append("Total threads: %d" % (len(frames)))
sb.append("")
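The POSIX half of that registration, as a self-contained sketch of a SIGUSR1 handler that dumps all thread stacks (SIGUSR1 is unavailable on native Windows, hence the event-object path above):

    import signal
    import sys
    import traceback

    def _dump_stacks(signum, frame):  # signature required by signal.signal()
        lines = ["Dumping stacks due to SIGUSR1 signal"]
        for thread_id, stack in sys._current_frames().items():
            lines.append("Thread %s:" % thread_id)
            lines.append("".join(traceback.format_stack(stack)))
        print("\n".join(lines))

    signal.signal(signal.SIGUSR1, _dump_stacks)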
|
@ -14,9 +14,7 @@ from .. import resmokeconfig
|
||||
|
||||
|
||||
def get_named_suites():
|
||||
"""
|
||||
Returns the list of suites available to execute.
|
||||
"""
|
||||
"""Return the list of suites available to execute."""
|
||||
|
||||
# Skip "with_*server" and "no_server" because they do not define any test files to run.
|
||||
executor_only = {"with_server", "with_external_server", "no_server"}
|
||||
@ -26,8 +24,7 @@ def get_named_suites():
|
||||
|
||||
|
||||
def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
|
||||
"""
|
||||
Returns a dict keyed by test name containing all of the suites that will run that test.
|
||||
"""Return a dict keyed by test name containing all of the suites that will run that test.
|
||||
|
||||
If 'test_kind' is specified then only the mappings for that kind are returned.
|
||||
Since this iterates through every available suite, it should only be run once.
|
||||
@ -59,7 +56,7 @@ def create_test_membership_map(fail_on_missing_selector=False, test_kind=None):
|
||||
|
||||
|
||||
def get_suites(suite_files, test_files):
|
||||
"""Retrieves the Suite instances based on suite configuration files and override parameters.
|
||||
"""Retrieve the Suite instances based on suite configuration files and override parameters.
|
||||
|
||||
Args:
|
||||
suite_files: A list of file paths pointing to suite YAML configuration files. For the suites
|
||||
@ -93,10 +90,7 @@ def _make_suite_roots(files):
|
||||
|
||||
|
||||
def _get_suite_config(pathname):
|
||||
"""
|
||||
Attempts to read a YAML configuration from 'pathname' that describes
|
||||
what tests to run and how to run them.
|
||||
"""
|
||||
"""Attempt to read YAML configuration from 'pathname' for the suite."""
|
||||
return _get_yaml_config("suite", pathname)
|
||||
|
||||
|
||||
|
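create_test_membership_map() inverts the suite configurations into a test-to-suites mapping. A hedged sketch of that inversion, assuming a plain {suite: [tests]} input in place of the real Suite objects:

import collections

def make_membership_map(suite_to_tests):
    # Invert {suite_name: [test names]} into {test name: [suite names]}.
    memberships = collections.defaultdict(list)
    for suite_name, tests in suite_to_tests.items():
        for test in tests:
            memberships[test].append(suite_name)
    return dict(memberships)

print(make_membership_map({"core": ["a.js", "b.js"], "replica_sets": ["b.js"]}))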
@ -1,7 +1,4 @@
"""
Extension to the unittest package to support buildlogger and parallel
test execution.
"""
"""Extension to the unittest package to support buildlogger and parallel test execution."""

from __future__ import absolute_import

@ -1,6 +1,4 @@
"""
Driver of the test execution framework.
"""
"""Driver of the test execution framework."""

from __future__ import absolute_import

@ -21,8 +19,7 @@ from ..utils import queue as _queue


class TestSuiteExecutor(object):
"""
Executes a test suite.
"""Execute a test suite.

Responsible for setting up and tearing down the fixtures that the
tests execute against.
@ -30,11 +27,10 @@ class TestSuiteExecutor(object):

_TIMEOUT = 24 * 60 * 60 # =1 day (a long time to have tests run)

def __init__(self, exec_logger, suite, config=None, fixture=None, hooks=None,
archive_instance=None, archive=None):
"""
Initializes the TestSuiteExecutor with the test suite to run.
"""
def __init__( # pylint: disable=too-many-arguments
self, exec_logger, suite, config=None, fixture=None, hooks=None, archive_instance=None,
archive=None):
"""Initialize the TestSuiteExecutor with the test suite to run."""
self.logger = exec_logger

if _config.SHELL_CONN_STRING is not None:
@ -69,8 +65,7 @@ class TestSuiteExecutor(object):
self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)]

def run(self):
"""
Executes the test suite.
"""Execute the test suite.

Any exceptions that occur during setting up or tearing down a
fixture are propagated.
@ -128,9 +123,7 @@ class TestSuiteExecutor(object):
self._suite.return_code = return_code

def _setup_fixtures(self):
"""
Sets up a fixture for each job.
"""
"""Set up a fixture for each job."""

# We reset the internal state of the PortAllocator before calling job.fixture.setup() so
# that ports used by the fixture during a test suite run earlier can be reused during this
@ -140,7 +133,7 @@ class TestSuiteExecutor(object):
for job in self._jobs:
try:
job.fixture.setup()
except:
except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while setting up %s.", job.fixture)
return False

@ -148,16 +141,14 @@ class TestSuiteExecutor(object):
for job in self._jobs:
try:
job.fixture.await_ready()
except:
except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while waiting for %s to be ready",
job.fixture)
return False
return True

def _run_tests(self, test_queue, teardown_flag):
"""
Starts a thread for each Job instance and blocks until all of
the tests are run.
"""Start a thread for each Job instance and block until all of the tests are run.

Returns a (combined report, user interrupted) pair, where the
report contains the status and timing information of tests run
@ -170,12 +161,12 @@ class TestSuiteExecutor(object):
try:
# Run each Job instance in its own thread.
for job in self._jobs:
t = threading.Thread(target=job, args=(test_queue, interrupt_flag),
kwargs=dict(teardown_flag=teardown_flag))
thr = threading.Thread(target=job, args=(test_queue, interrupt_flag),
kwargs=dict(teardown_flag=teardown_flag))
# Do not wait for tests to finish executing if interrupted by the user.
t.daemon = True
t.start()
threads.append(t)
thr.daemon = True
thr.start()
threads.append(thr)
# SERVER-24729 Need to stagger when jobs start to reduce I/O load if there
# are many of them. Both the 5 and the 10 are arbitrary.
# Currently only enabled on Evergreen.
@ -192,8 +183,8 @@ class TestSuiteExecutor(object):
user_interrupted = True
else:
# Only wait for all the Job instances if not interrupted by the user.
for t in threads:
t.join()
for thr in threads:
thr.join()

reports = [job.report for job in self._jobs]
combined_report = _report.TestReport.combine(*reports)
@ -204,8 +195,7 @@ class TestSuiteExecutor(object):
return (combined_report, user_interrupted)

def _teardown_fixtures(self):
"""
Tears down all of the fixtures.
"""Tear down all of the fixtures.

Returns true if all fixtures were torn down successfully, and
false otherwise.
@ -217,15 +207,13 @@ class TestSuiteExecutor(object):
except errors.ServerFailure as err:
self.logger.warn("Teardown of %s was not successful: %s", job.fixture, err)
success = False
except:
except: # pylint: disable=bare-except
self.logger.exception("Encountered an error while tearing down %s.", job.fixture)
success = False
return success

def _make_fixture(self, job_num, job_logger):
"""
Creates a fixture for a job.
"""
"""Create a fixture for a job."""

fixture_config = {}
fixture_class = fixtures.NOOP_FIXTURE_CLASS
@ -238,10 +226,8 @@ class TestSuiteExecutor(object):

return fixtures.make_fixture(fixture_class, fixture_logger, job_num, **fixture_config)

def _make_hooks(self, job_num, fixture):
"""
Creates the hooks for the job's fixture.
"""
def _make_hooks(self, fixture):
"""Create the hooks for the job's fixture."""

hooks = []

@ -256,22 +242,18 @@ class TestSuiteExecutor(object):
return hooks

def _make_job(self, job_num):
"""
Returns a Job instance with its own fixture, hooks, and test
report.
"""
"""Return a Job instance with its own fixture, hooks, and test report."""
job_logger = self.logger.new_job_logger(self._suite.test_kind, job_num)

fixture = self._make_fixture(job_num, job_logger)
hooks = self._make_hooks(job_num, fixture)
hooks = self._make_hooks(fixture)

report = _report.TestReport(job_logger, self._suite.options)

return _job.Job(job_logger, fixture, hooks, report, self.archival, self._suite.options)

def _make_test_queue(self):
"""
Returns a queue of TestCase instances.
"""Return a queue of TestCase instances.

Use a multi-consumer queue instead of a unittest.TestSuite so
that the test cases can be dispatched to multiple threads.

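The _run_tests() hunk renames the thread variable t to thr but keeps the pattern intact: one daemon thread per Job, started and then joined. Reduced to its core, with plain callables standing in for the real Job instances:

import threading

def run_jobs(jobs):
    # Start one daemon thread per job, then wait for all of them to finish.
    threads = []
    for job in jobs:
        thr = threading.Thread(target=job)
        thr.daemon = True  # daemon threads do not block interpreter exit on interrupt
        thr.start()
        threads.append(thr)
    for thr in threads:
        thr.join()

run_jobs([lambda: None] * 3)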
@ -1,6 +1,4 @@
"""
Fixtures for executing JSTests against.
"""
"""Fixture for executing JSTests against."""

from __future__ import absolute_import

@ -12,4 +10,4 @@ NOOP_FIXTURE_CLASS = _NoOpFixture.REGISTERED_NAME

# We dynamically load all modules in the fixtures/ package so that any Fixture classes declared
# within them are automatically registered.
_autoloader.load_all_modules(name=__name__, path=__path__)
_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore
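_autoloader.load_all_modules() imports every module in the package so that the registry metaclass (next file) sees every Fixture subclass. The helper itself is not part of this diff, so the following is only a plausible reimplementation under that assumption:

import importlib
import pkgutil

def load_all_modules(name, path):
    # Import every module in the package so class-registration side effects run.
    for _, module_name, _ in pkgutil.iter_modules(path):
        importlib.import_module(name + "." + module_name)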
@ -1,6 +1,4 @@
"""
Interface of the different fixtures for executing JSTests against.
"""
"""Interface of the different fixtures for executing JSTests against."""

from __future__ import absolute_import

@ -16,13 +14,11 @@ from ... import logging
from ... import utils
from ...utils import registry

_FIXTURES = {}
_FIXTURES = {} # type: ignore


def make_fixture(class_name, *args, **kwargs):
"""
Factory function for creating Fixture instances.
"""
"""Provide factory function for creating Fixture instances."""

if class_name not in _FIXTURES:
raise ValueError("Unknown fixture class '%s'" % class_name)
@ -30,20 +26,16 @@ def make_fixture(class_name, *args, **kwargs):


class Fixture(object):
"""
Base class for all fixtures.
"""
"""Base class for all fixtures."""

__metaclass__ = registry.make_registry_metaclass(_FIXTURES)
__metaclass__ = registry.make_registry_metaclass(_FIXTURES) # type: ignore

# We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute
# is defined for all subclasses of Fixture.
REGISTERED_NAME = "Fixture"

def __init__(self, logger, job_num, dbpath_prefix=None):
"""
Initializes the fixture with a logger instance.
"""
"""Initialize the fixture with a logger instance."""

if not isinstance(logger, logging.Logger):
raise TypeError("logger must be a Logger instance")
@ -61,20 +53,15 @@ class Fixture(object):
self._dbpath_prefix = os.path.join(dbpath_prefix, "job{}".format(self.job_num))

def setup(self):
"""
Creates the fixture.
"""
"""Create the fixture."""
pass

def await_ready(self):
"""
Blocks until the fixture can be used for testing.
"""
"""Block until the fixture can be used for testing."""
pass

def teardown(self, finished=False):
"""
Destroys the fixture.
def teardown(self, finished=False): # noqa
"""Destroy the fixture.

The fixture's logging handlers are closed if 'finished' is true,
which should happen when setup() won't be called again.
@ -92,9 +79,8 @@ class Fixture(object):
# want the logs to eventually get flushed.
logging.flush.close_later(handler)

def _do_teardown(self):
"""
Destroys the fixture.
def _do_teardown(self): # noqa
"""Destroy the fixture.

This method must be implemented by subclasses.

@ -103,36 +89,32 @@ class Fixture(object):
"""
pass

def is_running(self):
"""
Returns true if the fixture is still operating and more tests
can be run, and false otherwise.
"""
def is_running(self): # pylint: disable=no-self-use
"""Return true if the fixture is still operating and more tests can be run."""
return True

def get_dbpath_prefix(self):
"""Return dbpath prefix."""
return self._dbpath_prefix

def get_internal_connection_string(self):
"""
Returns the connection string for this fixture. This is NOT a
driver connection string, but a connection string of the format
"""Return the connection string for this fixture.

This is NOT a driver connection string, but a connection string of the format
expected by the mongo::ConnectionString class.
"""
raise NotImplementedError("get_connection_string must be implemented by Fixture subclasses")

def get_driver_connection_url(self):
"""
Return the mongodb connection string as defined here:
"""Return the mongodb connection string as defined below.

https://docs.mongodb.com/manual/reference/connection-string/
"""
raise NotImplementedError(
"get_driver_connection_url must be implemented by Fixture subclasses")

def mongo_client(self, read_preference=pymongo.ReadPreference.PRIMARY, timeout_millis=30000):
"""
Returns a pymongo.MongoClient connecting to this fixture with a read
preference of 'read_preference'.
"""Return a pymongo.MongoClient connecting to this fixture with specified 'read_preference'.

The PyMongo driver will wait up to 'timeout_millis' milliseconds
before concluding that the server is unavailable.
@ -154,30 +136,23 @@ class Fixture(object):


class ReplFixture(Fixture):
"""
Base class for all fixtures that support replication.
"""
"""Base class for all fixtures that support replication."""

REGISTERED_NAME = registry.LEAVE_UNREGISTERED
REGISTERED_NAME = registry.LEAVE_UNREGISTERED # type: ignore

AWAIT_REPL_TIMEOUT_MINS = 5

def get_primary(self):
"""
Returns the primary of a replica set, or the master of a
master-slave deployment.
"""
"""Return the primary of a replica set."""
raise NotImplementedError("get_primary must be implemented by ReplFixture subclasses")

def get_secondaries(self):
"""
Returns a list containing the secondaries of a replica set, or
the slave of a master-slave deployment.
"""
"""Return a list containing the secondaries of a replica set."""
raise NotImplementedError("get_secondaries must be implemented by ReplFixture subclasses")

def retry_until_wtimeout(self, insert_fn):
"""
"""Retry until wtimeout reached.

Given a callback function representing an insert operation on
the primary, handle any connection failures, and keep retrying
the operation for up to 'AWAIT_REPL_TIMEOUT_MINS' minutes.
@ -221,9 +196,11 @@ class NoOpFixture(Fixture):
REGISTERED_NAME = "NoOpFixture"

def get_internal_connection_string(self):
"""Return the internal connection string."""
return None

def get_driver_connection_url(self):
"""Return the driver connection URL."""
return None


@ -231,7 +208,7 @@ class FixtureTeardownHandler(object):
"""A helper class used to teardown nodes inside a cluster and keep track of errors."""

def __init__(self, logger):
"""Initializes a FixtureTeardownHandler.
"""Initialize a FixtureTeardownHandler.

Args:
logger: A logger to use to log teardown activity.
@ -241,19 +218,18 @@ class FixtureTeardownHandler(object):
self._message = None

def was_successful(self):
"""Indicates whether the teardowns performed by this instance were all successful."""
"""Indicate whether the teardowns performed by this instance were all successful."""
return self._success

def get_error_message(self):
"""
Retrieves the combined error message for all the teardown failures or None if all the
teardowns were successful.
"""Retrieve the combined error message for all the teardown failures.

Return None if all the teardowns were successful.
"""
return self._message

def teardown(self, fixture, name):
"""
Tears down the given fixture and logs errors instead of raising a ServerFailure exception.
def teardown(self, fixture, name): # noqa: D406,D407,D411,D413
"""Tear down the given fixture and log errors instead of raising a ServerFailure exception.

Args:
fixture: The fixture to tear down.
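Fixture installs registry.make_registry_metaclass(_FIXTURES) as its metaclass, which is how make_fixture() can look classes up by REGISTERED_NAME. The registry module itself is not shown in this diff; one way such a factory could plausibly work:

def make_registry_metaclass(registry_store):
    # Sketch only: record each new class in 'registry_store' under its REGISTERED_NAME.
    class RegistryMeta(type):
        def __new__(mcs, name, bases, namespace):
            cls = type.__new__(mcs, name, bases, namespace)
            registered_name = namespace.get("REGISTERED_NAME", name)
            if registered_name is not None:
                registry_store[registered_name] = cls
            return cls
    return RegistryMeta

_FIXTURES = {}
Fixture = make_registry_metaclass(_FIXTURES)("Fixture", (object,), {"REGISTERED_NAME": "Fixture"})
print(_FIXTURES)  # {'Fixture': <class 'Fixture'>}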
@ -1,6 +1,4 @@
"""
Replica set fixture for executing JSTests against.
"""
"""Replica set fixture for executing JSTests against."""

from __future__ import absolute_import

@ -17,19 +15,19 @@ from ... import errors
from ... import utils


class ReplicaSetFixture(interface.ReplFixture):
"""
Fixture which provides JSTests with a replica set to run against.
"""
class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-instance-attributes
"""Fixture which provides JSTests with a replica set to run against."""

# Error response codes copied from mongo/base/error_codes.err.
_NODE_NOT_FOUND = 74

def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
dbpath_prefix=None, preserve_dbpath=False, num_nodes=2,
start_initial_sync_node=False, write_concern_majority_journal_default=None,
auth_options=None, replset_config_options=None, voting_secondaries=None,
all_nodes_electable=False, use_replica_set_connection_string=None):
def __init__( # pylint: disable=too-many-arguments
self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None,
preserve_dbpath=False, num_nodes=2, start_initial_sync_node=False,
write_concern_majority_journal_default=None, auth_options=None,
replset_config_options=None, voting_secondaries=None, all_nodes_electable=False,
use_replica_set_connection_string=None):
"""Initialize ReplicaSetFixture."""

interface.ReplFixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)

@ -71,7 +69,8 @@ class ReplicaSetFixture(interface.ReplFixture):
self.initial_sync_node = None
self.initial_sync_node_idx = -1

def setup(self):
def setup(self): # pylint: disable=too-many-branches,too-many-statements
"""Set up the replica set."""
self.replset_name = self.mongod_options.get("replSet", "rs")

if not self.nodes:
@ -113,7 +112,7 @@ class ReplicaSetFixture(interface.ReplFixture):
"hidden": 1, "votes": 0
})

config = {"_id": self.replset_name}
repl_config = {"_id": self.replset_name}
client = self.nodes[0].mongo_client()

if self.auth_options is not None:
@ -127,33 +126,33 @@ class ReplicaSetFixture(interface.ReplFixture):
return

if self.write_concern_majority_journal_default is not None:
config[
repl_config[
"writeConcernMajorityJournalDefault"] = self.write_concern_majority_journal_default
else:
server_status = client.admin.command({"serverStatus": 1})
cmd_line_opts = client.admin.command({"getCmdLineOpts": 1})
if not (server_status["storageEngine"]["persistent"] and cmd_line_opts["parsed"].get(
"storage", {}).get("journal", {}).get("enabled", True)):
config["writeConcernMajorityJournalDefault"] = False
repl_config["writeConcernMajorityJournalDefault"] = False

if self.replset_config_options.get("configsvr", False):
config["configsvr"] = True
repl_config["configsvr"] = True
if self.replset_config_options.get("settings"):
replset_settings = self.replset_config_options["settings"]
config["settings"] = replset_settings
repl_config["settings"] = replset_settings

# If secondaries vote, all nodes are not electable, and no election timeout was specified,
# increase the election timeout to 24 hours to prevent elections.
if self.voting_secondaries and not self.all_nodes_electable:
config.setdefault("settings", {})
if "electionTimeoutMillis" not in config["settings"]:
config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000
repl_config.setdefault("settings", {})
if "electionTimeoutMillis" not in repl_config["settings"]:
repl_config["settings"]["electionTimeoutMillis"] = 24 * 60 * 60 * 1000

# Start up a single node replica set then reconfigure to the correct size (if the config
# contains more than 1 node), so the primary is elected more quickly.
config["members"] = [members[0]]
self.logger.info("Issuing replSetInitiate command: %s", config)
self._configure_repl_set(client, {"replSetInitiate": config})
repl_config["members"] = [members[0]]
self.logger.info("Issuing replSetInitiate command: %s", repl_config)
self._configure_repl_set(client, {"replSetInitiate": repl_config})
self._await_primary()

if self.nodes[1:]:
@ -161,10 +160,10 @@ class ReplicaSetFixture(interface.ReplFixture):
# command.
for node in self.nodes[1:]:
node.await_ready()
config["version"] = 2
config["members"] = members
self.logger.info("Issuing replSetReconfig command: %s", config)
self._configure_repl_set(client, {"replSetReconfig": config})
repl_config["version"] = 2
repl_config["members"] = members
self.logger.info("Issuing replSetReconfig command: %s", repl_config)
self._configure_repl_set(client, {"replSetReconfig": repl_config})
self._await_secondaries()

def _configure_repl_set(self, client, cmd_obj):
@ -194,6 +193,7 @@ class ReplicaSetFixture(interface.ReplFixture):
time.sleep(5) # Wait a little bit before trying again.

def await_ready(self):
"""Wait for replica set to be ready."""
self._await_primary()
self._await_secondaries()

@ -254,6 +254,7 @@ class ReplicaSetFixture(interface.ReplFixture):
raise errors.ServerFailure(teardown_handler.get_error_message())

def is_running(self):
"""Return True if all nodes in the replica set are running."""
running = all(node.is_running() for node in self.nodes)

if self.initial_sync_node:
@ -261,7 +262,8 @@ class ReplicaSetFixture(interface.ReplFixture):

return running

def get_primary(self, timeout_secs=30):
def get_primary(self, timeout_secs=30): # pylint: disable=arguments-differ
"""Return the primary from a replica set."""
if not self.all_nodes_electable:
# The primary is always the first element of the 'nodes' list because all other members
# of the replica set are configured with priority=0.
@ -299,17 +301,16 @@ class ReplicaSetFixture(interface.ReplFixture):
raise errors.ServerFailure(msg)

def get_secondaries(self):
"""Return a list of secondaries from the replica set."""
primary = self.get_primary()
return [node for node in self.nodes if node.port != primary.port]

def get_initial_sync_node(self):
"""Return the initial sync node from the replica set."""
return self.initial_sync_node

def _new_mongod(self, index, replset_name):
"""
Returns a standalone.MongoDFixture configured to be used as a
replica-set member of 'replset_name'.
"""
"""Return a standalone.MongoDFixture configured to be used as replica-set member."""

mongod_logger = self._get_logger_for_mongod(index)
mongod_options = self.mongod_options.copy()
@ -321,9 +322,9 @@ class ReplicaSetFixture(interface.ReplFixture):
mongod_options=mongod_options, preserve_dbpath=self.preserve_dbpath)

def _get_logger_for_mongod(self, index):
"""
Returns a new logging.Logger instance for use as the primary, secondary, or initial
sync member of a replica-set.
"""Return a new logging.Logger instance.

The instance is used as the primary, secondary, or initial sync member of a replica-set.
"""

if index == self.initial_sync_node_idx:
@ -339,6 +340,7 @@ class ReplicaSetFixture(interface.ReplFixture):
return self.logger.new_fixture_node_logger(node_name)

def get_internal_connection_string(self):
"""Return the internal connection string."""
if self.replset_name is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")

@ -348,6 +350,7 @@ class ReplicaSetFixture(interface.ReplFixture):
return self.replset_name + "/" + ",".join(conn_strs)

def get_driver_connection_url(self):
"""Return the driver connection URL."""
if self.replset_name is None:
raise ValueError("Must call setup() before calling get_driver_connection_url()")

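The setup() hunk renames config to repl_config and hands it to replSetInitiate. Stripped of the fixture machinery, the same handshake against an already-running mongod (assumed to be listening locally and started with --replSet) is a minimal sketch like:

import pymongo

def initiate_single_node_replset(port, replset_name):
    # Issue replSetInitiate with a one-member config, as the fixture does first.
    client = pymongo.MongoClient("localhost", port)
    repl_config = {"_id": replset_name,
                   "members": [{"_id": 0, "host": "localhost:%d" % port}]}
    client.admin.command({"replSetInitiate": repl_config})

# e.g. initiate_single_node_replset(20000, "rs")  # port/name are made up for the example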
@ -1,6 +1,4 @@
"""
Sharded cluster fixture for executing JSTests against.
"""
"""Sharded cluster fixture for executing JSTests against."""

from __future__ import absolute_import

@ -20,24 +18,19 @@ from ... import utils
from ...utils import registry


class ShardedClusterFixture(interface.Fixture):
"""
Fixture which provides JSTests with a sharded cluster to run
against.
"""
class ShardedClusterFixture(interface.Fixture): # pylint: disable=too-many-instance-attributes
"""Fixture which provides JSTests with a sharded cluster to run against."""

_CONFIGSVR_REPLSET_NAME = "config-rs"
_SHARD_REPLSET_NAME_PREFIX = "shard-rs"

def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None,
mongod_executable=None, mongod_options=None, dbpath_prefix=None,
preserve_dbpath=False, num_shards=1, num_rs_nodes_per_shard=None,
separate_configsvr=True, enable_sharding=None, enable_balancer=True,
auth_options=None, configsvr_options=None, shard_options=None):
"""
Initializes ShardedClusterFixture with the different options to
the mongod and mongos processes.
"""
def __init__( # pylint: disable=too-many-arguments,too-many-locals
self, logger, job_num, mongos_executable=None, mongos_options=None,
mongod_executable=None, mongod_options=None, dbpath_prefix=None, preserve_dbpath=False,
num_shards=1, num_rs_nodes_per_shard=None, separate_configsvr=True,
enable_sharding=None, enable_balancer=True, auth_options=None, configsvr_options=None,
shard_options=None):
"""Initialize ShardedClusterFixture with different options for the cluster processes."""

interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)

@ -65,6 +58,7 @@ class ShardedClusterFixture(interface.Fixture):
self.shards = []

def setup(self):
"""Set up the sharded cluster."""
if self.separate_configsvr:
if self.configsvr is None:
self.configsvr = self._new_configsvr()
@ -87,6 +81,7 @@ class ShardedClusterFixture(interface.Fixture):
shard.setup()

def await_ready(self):
"""Block until the fixture can be used for testing."""
# Wait for the config server
if self.configsvr is not None:
self.configsvr.await_ready()
@ -130,9 +125,7 @@ class ShardedClusterFixture(interface.Fixture):
primary.admin.command({"refreshLogicalSessionCacheNow": 1})

def _do_teardown(self):
"""
Shuts down the sharded cluster.
"""
"""Shut down the sharded cluster."""
self.logger.info("Stopping all members of the sharded cluster...")

running_at_start = self.is_running()
@ -158,28 +151,24 @@ class ShardedClusterFixture(interface.Fixture):
raise errors.ServerFailure(teardown_handler.get_error_message())

def is_running(self):
"""
Returns true if the config server, all shards, and the mongos
are all still operating, and false otherwise.
"""
"""Return true if all nodes in the cluster are still operating."""
return (self.configsvr is not None and self.configsvr.is_running()
and all(shard.is_running() for shard in self.shards) and self.mongos is not None
and self.mongos.is_running())

def get_internal_connection_string(self):
"""Return the internal connection string."""
if self.mongos is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")

return self.mongos.get_internal_connection_string()

def get_driver_connection_url(self):
"""Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()

def _new_configsvr(self):
"""
Returns a replicaset.ReplicaSetFixture configured to be used as
the config server of a sharded cluster.
"""
"""Return a replicaset.ReplicaSetFixture configured as the config server."""

mongod_logger = self.logger.new_fixture_node_logger("configsvr")

@ -207,10 +196,7 @@ class ShardedClusterFixture(interface.Fixture):
**configsvr_options)

def _new_rs_shard(self, index, num_rs_nodes_per_shard):
"""
Returns a replicaset.ReplicaSetFixture configured to be used as a
shard in a sharded cluster.
"""
"""Return a replicaset.ReplicaSetFixture configured as a shard in a sharded cluster."""

mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index))

@ -236,10 +222,7 @@ class ShardedClusterFixture(interface.Fixture):
replset_config_options=replset_config_options, **shard_options)

def _new_standalone_shard(self, index):
"""
Returns a standalone.MongoDFixture configured to be used as a
shard in a sharded cluster.
"""
"""Return a standalone.MongoDFixture configured as a shard in a sharded cluster."""

mongod_logger = self.logger.new_fixture_node_logger("shard{}".format(index))

@ -258,10 +241,7 @@ class ShardedClusterFixture(interface.Fixture):
mongod_options=mongod_options, preserve_dbpath=preserve_dbpath, **shard_options)

def _new_mongos(self):
"""
Returns a _MongoSFixture configured to be used as the mongos for
a sharded cluster.
"""
"""Return a _MongoSFixture configured to be used as the mongos for a sharded cluster."""

mongos_logger = self.logger.new_fixture_node_logger("mongos")

@ -277,11 +257,9 @@ class ShardedClusterFixture(interface.Fixture):

def _add_shard(self, client, shard):
"""
Add the specified program as a shard by executing the addShard
command.
Add the specified program as a shard by executing the addShard command.

See https://docs.mongodb.org/manual/reference/command/addShard
for more details.
See https://docs.mongodb.org/manual/reference/command/addShard for more details.
"""

connection_string = shard.get_internal_connection_string()
@ -290,13 +268,12 @@ class ShardedClusterFixture(interface.Fixture):


class _MongoSFixture(interface.Fixture):
"""
Fixture which provides JSTests with a mongos to connect to.
"""
"""Fixture which provides JSTests with a mongos to connect to."""

REGISTERED_NAME = registry.LEAVE_UNREGISTERED
REGISTERED_NAME = registry.LEAVE_UNREGISTERED # type: ignore

def __init__(self, logger, job_num, mongos_executable=None, mongos_options=None):
"""Initialize _MongoSFixture."""

interface.Fixture.__init__(self, logger, job_num)

@ -309,6 +286,7 @@ class _MongoSFixture(interface.Fixture):
self.port = None

def setup(self):
"""Set up the mongos."""
if "port" not in self.mongos_options:
self.mongos_options["port"] = core.network.PortAllocator.next_fixture_port(self.job_num)
self.port = self.mongos_options["port"]
@ -327,6 +305,7 @@ class _MongoSFixture(interface.Fixture):
self.mongos = mongos

def await_ready(self):
"""Block until the fixture can be used for testing."""
deadline = time.time() + standalone.MongoDFixture.AWAIT_READY_TIMEOUT_SECS

# Wait until the mongos is accepting connections. The retry logic is necessary to support
@ -383,13 +362,16 @@ class _MongoSFixture(interface.Fixture):
self.port, self.mongos.pid, exit_code))

def is_running(self):
"""Return true if the mongos is still operating."""
return self.mongos is not None and self.mongos.poll() is None

def get_internal_connection_string(self):
"""Return the internal connection string."""
if self.mongos is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")

return "localhost:%d" % self.port

def get_driver_connection_url(self):
"""Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()
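_add_shard() above boils down to one admin command against the mongos. A hedged standalone equivalent, with the port and replica-set name invented purely for the example:

import pymongo

def add_shard(mongos_port, shard_connection_string):
    # Register a shard with a running mongos via the addShard command.
    client = pymongo.MongoClient("localhost", mongos_port)
    client.admin.command({"addShard": shard_connection_string})

# e.g. add_shard(20006, "shard-rs0/localhost:20000,localhost:20001")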
@ -1,6 +1,4 @@
"""
Standalone mongod fixture for executing JSTests against.
"""
"""Standalone mongod fixture for executing JSTests against."""

from __future__ import absolute_import

@ -20,15 +18,14 @@ from ... import utils


class MongoDFixture(interface.Fixture):
"""
Fixture which provides JSTests with a standalone mongod to run
against.
"""
"""Fixture which provides JSTests with a standalone mongod to run against."""

AWAIT_READY_TIMEOUT_SECS = 300

def __init__(self, logger, job_num, mongod_executable=None, mongod_options=None,
dbpath_prefix=None, preserve_dbpath=False):
def __init__( # pylint: disable=too-many-arguments
self, logger, job_num, mongod_executable=None, mongod_options=None, dbpath_prefix=None,
preserve_dbpath=False):
"""Initialize MongoDFixture with different options for the mongod process."""

interface.Fixture.__init__(self, logger, job_num, dbpath_prefix=dbpath_prefix)

@ -51,6 +48,7 @@ class MongoDFixture(interface.Fixture):
self.port = None

def setup(self):
"""Set up the mongod."""
if not self.preserve_dbpath:
shutil.rmtree(self._dbpath, ignore_errors=True)

@ -78,6 +76,7 @@ class MongoDFixture(interface.Fixture):
self.mongod = mongod

def await_ready(self):
"""Block until the fixture can be used for testing."""
deadline = time.time() + MongoDFixture.AWAIT_READY_TIMEOUT_SECS

# Wait until the mongod is accepting connections. The retry logic is necessary to support
@ -134,17 +133,20 @@ class MongoDFixture(interface.Fixture):
self.port, self.mongod.pid, exit_code))

def is_running(self):
"""Return true if the mongod is still operating."""
return self.mongod is not None and self.mongod.poll() is None

def get_dbpath_prefix(self):
""" Returns the _dbpath, as this is the root of the data directory. """
"""Return the _dbpath, as this is the root of the data directory."""
return self._dbpath

def get_internal_connection_string(self):
"""Return the internal connection string."""
if self.mongod is None:
raise ValueError("Must call setup() before calling get_internal_connection_string()")

return "localhost:%d" % self.port

def get_driver_connection_url(self):
"""Return the driver connection URL."""
return "mongodb://" + self.get_internal_connection_string()
@ -1,6 +1,4 @@
"""
Fixture for generating lots of log messages.
"""
"""Fixture for generating lots of log messages."""

from __future__ import absolute_import

@ -10,18 +8,18 @@ from . import interface
from ...core import programs


class YesFixture(interface.Fixture):
"""
Fixture which spawns potentially several 'yes' executables to generate lots of log messages.
"""
class YesFixture(interface.Fixture): # pylint: disable=abstract-method
"""Fixture which spawns several 'yes' executables to generate lots of log messages."""

def __init__(self, logger, job_num, num_instances=1, message_length=100):
"""Initialize YesFixture."""
interface.Fixture.__init__(self, logger, job_num)

self.__processes = [None] * num_instances
self.__message = "y" * message_length

def setup(self):
"""Start the yes processes."""
for (i, process) in enumerate(self.__processes):
process = self._make_process(i)

@ -65,4 +63,5 @@ class YesFixture(interface.Fixture):
return success

def is_running(self):
"""Return true if the yes processes are running."""
return all(process is not None and process.poll() is None for process in self.__processes)
@ -1,6 +1,4 @@
"""
Enables supports for archiving tests or hooks.
"""
"""Enable support for archiving tests or hooks."""

from __future__ import absolute_import

@ -13,11 +11,10 @@ from ..utils import globstar


class HookTestArchival(object):
"""
Archives hooks and tests to S3.
"""
"""Archive hooks and tests to S3."""

def __init__(self, suite, hooks, archive_instance, archive_config):
"""Initialize HookTestArchival."""
self.archive_instance = archive_instance
archive_config = utils.default_if_none(archive_config, {})

@ -45,11 +42,11 @@ class HookTestArchival(object):
self._lock = threading.Lock()

def _should_archive(self, success):
""" Return True if failed test or 'on_success' is True. """
"""Return True if failed test or 'on_success' is True."""
return not success or self.on_success

def _archive_hook(self, logger, hook, test, success):
""" Helper to archive hooks. """
"""Provide helper to archive hooks."""
hook_match = hook.REGISTERED_NAME in self.hooks
if not hook_match or not self._should_archive(success):
return
@ -58,7 +55,7 @@ class HookTestArchival(object):
self._archive_hook_or_test(logger, test_name, test)

def _archive_test(self, logger, test, success):
""" Helper to archive tests. """
"""Provide helper to archive tests."""
test_name = test.test_name
test_match = False
for arch_test in self.tests:
@ -72,7 +69,7 @@ class HookTestArchival(object):
self._archive_hook_or_test(logger, test_name, test)

def archive(self, logger, test, success, hook=None):
""" Archives data files for hooks or tests. """
"""Archive data files for hooks or tests."""
if not config.ARCHIVE_FILE or not self.archive_instance:
return
if hook:
@ -81,7 +78,7 @@ class HookTestArchival(object):
self._archive_test(logger, test, success)

def _archive_hook_or_test(self, logger, test_name, test):
""" Trigger archive of data files for a test or hook. """
"""Trigger archive of data files for a test or hook."""

with self._lock:
# Test repeat number is how many times the particular test has been archived.
@ -12,4 +12,4 @@ from ...utils import autoloader as _autoloader

# We dynamically load all modules in the hooks/ package so that any Hook classes declared
# within them are automatically registered.
_autoloader.load_all_modules(name=__name__, path=__path__)
_autoloader.load_all_modules(name=__name__, path=__path__) # type: ignore
@ -1,6 +1,4 @@
"""
Testing hook for verifying that the primary has not stepped down or changed.
"""
"""Test hook for verifying that the primary has not stepped down or changed."""

from __future__ import absolute_import

@ -15,6 +13,7 @@ class CheckPrimary(interface.Hook):
"""Hook that checks that the primary is still primary after the test."""

def __init__(self, hook_logger, rs_fixture):
"""Initialize CheckPrimary."""
description = "Verify that the primary has not stepped down or changed"
interface.Hook.__init__(self, hook_logger, rs_fixture, description)

@ -39,9 +38,11 @@ class CheckPrimary(interface.Hook):
raise no_primary_err

def before_test(self, test, test_report):
"""Record the primary URL before the test runs."""
self._primary_url = self._get_primary_url()

def after_test(self, test, test_report):
"""Check that the primary is unchanged after the test runs."""
new_primary_url = self._get_primary_url()

if new_primary_url != self._primary_url:
@ -1,6 +1,4 @@
"""
Testing hook for cleaning up data files created by the fixture.
"""
"""Test hook for cleaning up data files created by the fixture."""

from __future__ import absolute_import

@ -10,14 +8,15 @@ from . import interface


class CleanEveryN(interface.Hook):
"""
Restarts the fixture after it has ran 'n' tests.
"""Restart the fixture after it has run 'n' tests.

On mongod-related fixtures, this will clear the dbpath.
"""

DEFAULT_N = 20

def __init__(self, hook_logger, fixture, n=DEFAULT_N):
"""Initialize CleanEveryN."""
description = "CleanEveryN (restarts the fixture after running `n` tests)"
interface.Hook.__init__(self, hook_logger, fixture, description)

@ -27,10 +26,11 @@ class CleanEveryN(interface.Hook):
" the fixture after each test instead of after every %d.", n)
n = 1

self.n = n
self.n = n # pylint: disable=invalid-name
self.tests_run = 0

def after_test(self, test, test_report):
"""After test cleanup."""
self.tests_run += 1
if self.tests_run < self.n:
return
@ -42,7 +42,10 @@ class CleanEveryN(interface.Hook):


class CleanEveryNTestCase(interface.DynamicTestCase):
"""CleanEveryNTestCase class."""

def run_test(self):
"""Execute test hook."""
try:
self.logger.info("%d tests have been run against the fixture, stopping it...",
self._hook.tests_run)
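CleanEveryN's counting logic (tests_run climbing to n, then resetting) is independent of fixtures. Reduced to its core as a small sketch:

class EveryN(object):
    # Invoke 'action' after every n recorded events, then reset the counter.
    def __init__(self, n, action):
        self.n = n
        self.action = action
        self.count = 0

    def record_event(self):
        self.count += 1
        if self.count >= self.n:
            self.count = 0
            self.action()

every_third = EveryN(3, lambda: None)
for _ in range(7):
    every_third.record_event()  # fires after the 3rd and 6th events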
@ -12,7 +12,8 @@ from buildscripts.resmokelib.testing.hooks import interface


class CombineBenchmarkResults(interface.Hook):
"""
"""CombineBenchmarkResults class.

The CombineBenchmarkResults hook combines test results from
individual benchmark files to a single file. This is useful for
generating the json file to feed into the Evergreen performance
@ -22,6 +23,7 @@ class CombineBenchmarkResults(interface.Hook):
DESCRIPTION = "Combine JSON results from individual benchmarks"

def __init__(self, hook_logger, fixture):
"""Initialize CombineBenchmarkResults."""
interface.Hook.__init__(self, hook_logger, fixture, CombineBenchmarkResults.DESCRIPTION)
self.report_file = _config.PERF_REPORT_FILE

@ -35,27 +37,30 @@ class CombineBenchmarkResults(interface.Hook):
def _strftime(time):
return time.strftime("%Y-%m-%dT%H:%M:%SZ")

def after_test(self, test_case, test_report):
def after_test(self, test, test_report):
"""Update test report."""
if self.report_file is None:
return

bm_report_path = test_case.report_name()
bm_report_path = test.report_name()

with open(bm_report_path, "r") as report_file:
report_dict = json.load(report_file)
self._parse_report(report_dict)

def before_suite(self, test_report):
"""Set suite start time."""
self.create_time = datetime.datetime.now()

def after_suite(self, test_report):
"""Update test report."""
if self.report_file is None:
return

self.end_time = datetime.datetime.now()
report = self._generate_perf_plugin_report()
with open(self.report_file, "w") as f:
json.dump(report, f)
with open(self.report_file, "w") as fh:
json.dump(report, fh)

def _generate_perf_plugin_report(self):
"""Format the data to look like a perf plugin report."""
@ -68,8 +73,7 @@ class CombineBenchmarkResults(interface.Hook):

for name, report in self.benchmark_reports.items():
test_report = {
"name": name,
"context": report.context._asdict(),
"name": name, "context": report.context._asdict(),
"results": report.generate_perf_plugin_dict()
}

@ -93,15 +97,13 @@ class CombineBenchmarkResults(interface.Hook):


# Capture information from a Benchmark name in a logical format.
_BenchmarkName = collections.namedtuple("_BenchmarkName", [
"base_name",
"thread_count",
"statistic_type"
]);
_BenchmarkName = collections.namedtuple("_BenchmarkName",
["base_name", "thread_count", "statistic_type"])


class _BenchmarkThreadsReport(object):
"""
"""_BenchmarkThreadsReport class.

Class representation of a report for all thread levels of a single
benchmark test. Each report is designed to correspond to one graph
in the Evergreen perf plugin.
@ -127,10 +129,11 @@ class _BenchmarkThreadsReport(object):
]
}
"""

CONTEXT_FIELDS = [
"date", "cpu_scaling_enabled", "num_cpus", "mhz_per_cpu", "library_build_type"
]
Context = collections.namedtuple("Context", CONTEXT_FIELDS)
Context = collections.namedtuple("Context", CONTEXT_FIELDS) # type: ignore

def __init__(self, context_dict):
self.context = self.Context(**context_dict)
@ -139,11 +142,11 @@ class _BenchmarkThreadsReport(object):
self.thread_benchmark_map = collections.defaultdict(list)

def add_report(self, bm_name_obj, report):
"""Add to report."""
self.thread_benchmark_map[bm_name_obj.thread_count].append(report)

def generate_perf_plugin_dict(self):
"""
Generate perf plugin data points of the following format:
"""Generate perf plugin data points of the following format.

"1": {
"error_values": [
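_BenchmarkName above splits a Google Benchmark test name into base name, thread count, and statistic type. The exact name grammar is not shown in this hunk, so the parser below assumes the common "BM_Case/threads:16_mean" shape purely for illustration:

import collections

_BenchmarkName = collections.namedtuple("_BenchmarkName",
                                        ["base_name", "thread_count", "statistic_type"])

def parse_bm_name(name):
    # Assumed format: "<base>/threads:<count>[_<statistic>]".
    base_name, _, rest = name.partition("/threads:")
    thread_count, _, statistic_type = rest.partition("_")
    return _BenchmarkName(base_name, thread_count or "1", statistic_type or None)

print(parse_bm_name("BM_Insert/threads:16_mean"))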
@ -1,6 +1,4 @@
"""
Testing hook for verifying data consistency across a replica set.
"""
"""Test hook for verifying data consistency across a replica set."""

from __future__ import absolute_import

@ -10,13 +8,16 @@ from . import jsfile


class CheckReplDBHash(jsfile.DataConsistencyHook):
"""
Checks that the dbhashes of all non-local databases and non-replicated system collections
"""Check if the dbhashes match.

This includes dbhashes for all non-local databases and non-replicated system collections that
match on the primary and secondaries.
"""

def __init__(self, hook_logger, fixture, shell_options=None):
def __init__( # pylint: disable=super-init-not-called
self, hook_logger, fixture, shell_options=None):
"""Initialize CheckReplDBHash."""
description = "Check dbhashes of all replica set or master/slave members"
js_filename = os.path.join("jstests", "hooks", "run_check_repl_dbhash.js")
jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
shell_options=shell_options)
jsfile.JSHook.__init__( # pylint: disable=non-parent-init-called
self, hook_logger, fixture, js_filename, description, shell_options=shell_options)
@ -1,6 +1,4 @@
"""
Testing hook for verifying correctness of initial sync.
"""
"""Test hook for verifying correctness of initial sync."""

from __future__ import absolute_import

@ -18,7 +16,8 @@ from ... import errors


class BackgroundInitialSync(interface.Hook):
"""
"""BackgroundInitialSync class.

After every test, this hook checks if a background node has finished initial sync and if so,
validates it, tears it down, and restarts it.

@ -33,6 +32,7 @@ class BackgroundInitialSync(interface.Hook):
DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N

def __init__(self, hook_logger, fixture, n=DEFAULT_N, shell_options=None):
"""Initialize BackgroundInitialSync."""
if not isinstance(fixture, replicaset.ReplicaSetFixture):
raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format(
fixture.__class__.__name__))
@ -40,12 +40,13 @@ class BackgroundInitialSync(interface.Hook):
description = "Background Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)

self.n = n
self.n = n # pylint: disable=invalid-name
self.tests_run = 0
self.random_restarts = 0
self._shell_options = shell_options

def after_test(self, test, test_report):
"""After test execution."""
self.tests_run += 1

hook_test_case = BackgroundInitialSyncTestCase.create_after_test(
@ -55,14 +56,18 @@ class BackgroundInitialSync(interface.Hook):


class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
"""BackgroundInitialSyncTestCase class."""

JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")

def __init__(self, logger, test_name, description, base_test_name, hook, shell_options=None):
def __init__( # pylint: disable=too-many-arguments
self, logger, test_name, description, base_test_name, hook, shell_options=None):
"""Initialize BackgroundInitialSyncTestCase."""
jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
hook, self.JS_FILENAME, shell_options)

def run_test(self):
"""Execute test hook."""
sync_node = self.fixture.get_initial_sync_node()
sync_node_conn = sync_node.mongo_client()

@ -96,7 +101,7 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
if self._hook.random_restarts < 1 and random.random() < 0.2:
self.logger.info(
"randomly restarting initial sync in the middle of initial sync")
self.__restart_init_sync(sync_node, sync_node_conn)
self.__restart_init_sync(sync_node)
self._hook.random_restarts += 1
return
except pymongo.errors.OperationFailure:
@ -112,10 +117,10 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):
# Run data validation and dbhash checking.
self._js_test.run_test()

self.__restart_init_sync(sync_node, sync_node_conn)
self.__restart_init_sync(sync_node)

# Restarts initial sync by shutting down the node, clearing its data, and restarting it.
def __restart_init_sync(self, sync_node, sync_node_conn):
def __restart_init_sync(self, sync_node):
# Tear down and restart the initial sync node to start initial sync again.
sync_node.teardown()

@ -125,7 +130,8 @@ class BackgroundInitialSyncTestCase(jsfile.DynamicJSTestCase):


class IntermediateInitialSync(interface.Hook):
"""
"""IntermediateInitialSync class.

This hook accepts a parameter 'n' that specifies a number of tests after which it will start up
a node to initial sync, wait for replication to finish, and then validate the data.

@ -135,6 +141,7 @@ class IntermediateInitialSync(interface.Hook):
DEFAULT_N = cleanup.CleanEveryN.DEFAULT_N

def __init__(self, hook_logger, fixture, n=DEFAULT_N):
"""Initialize IntermediateInitialSync."""
if not isinstance(fixture, replicaset.ReplicaSetFixture):
raise ValueError("`fixture` must be an instance of ReplicaSetFixture, not {}".format(
fixture.__class__.__name__))
@ -142,7 +149,7 @@ class IntermediateInitialSync(interface.Hook):
description = "Intermediate Initial Sync"
interface.Hook.__init__(self, hook_logger, fixture, description)

self.n = n
self.n = n # pylint: disable=invalid-name
self.tests_run = 0

def _should_run_after_test(self):
@ -156,6 +163,7 @@ class IntermediateInitialSync(interface.Hook):
return True

def after_test(self, test, test_report):
"""After test execution."""
if not self._should_run_after_test():
return

@ -166,14 +174,18 @@ class IntermediateInitialSync(interface.Hook):


class IntermediateInitialSyncTestCase(jsfile.DynamicJSTestCase):
"""IntermediateInitialSyncTestCase class."""

JS_FILENAME = os.path.join("jstests", "hooks", "run_initial_sync_node_validation.js")

def __init__(self, logger, test_name, description, base_test_name, hook):
def __init__( # pylint: disable=too-many-arguments
self, logger, test_name, description, base_test_name, hook):
"""Initialize IntermediateInitialSyncTestCase."""
jsfile.DynamicJSTestCase.__init__(self, logger, test_name, description, base_test_name,
hook, self.JS_FILENAME)

def run_test(self):
"""Execute test hook."""
sync_node = self.fixture.get_initial_sync_node()
sync_node_conn = sync_node.mongo_client()

@ -1,6 +1,4 @@
|
||||
"""
|
||||
Interface for customizing the behavior of a test fixture.
|
||||
"""
|
||||
"""Interface for customizing the behavior of a test fixture."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
@ -11,13 +9,11 @@ from ... import errors
|
||||
from ...logging import loggers
|
||||
from ...utils import registry
|
||||
|
||||
_HOOKS = {}
|
||||
_HOOKS = {} # type: ignore
|
||||
|
||||
|
||||
def make_hook(class_name, *args, **kwargs):
|
||||
"""
|
||||
Factory function for creating Hook instances.
|
||||
"""
|
||||
"""Provide factory function for creating Hook instances."""
|
||||
|
||||
if class_name not in _HOOKS:
|
||||
raise ValueError("Unknown hook class '%s'" % class_name)
|
||||
@ -26,18 +22,14 @@ def make_hook(class_name, *args, **kwargs):
|
||||
|
||||
|
||||
class Hook(object):
|
||||
"""
|
||||
The common interface all Hooks will inherit from.
|
||||
"""
|
||||
"""Common interface all Hooks will inherit from."""
|
||||
|
||||
__metaclass__ = registry.make_registry_metaclass(_HOOKS)
|
||||
__metaclass__ = registry.make_registry_metaclass(_HOOKS) # type: ignore
|
||||
|
||||
REGISTERED_NAME = registry.LEAVE_UNREGISTERED
|
||||
|
||||
def __init__(self, hook_logger, fixture, description):
|
||||
"""
|
||||
Initializes the Hook with the specified fixture.
|
||||
"""
|
||||
"""Initialize the Hook with the specified fixture."""
|
||||
|
||||
if not isinstance(hook_logger, loggers.HookLogger):
|
||||
raise TypeError("logger must be a HookLogger instance")
@ -47,42 +39,38 @@ class Hook(object):
        self.description = description

    def before_suite(self, test_report):
        """
        The test runner calls this exactly once before they start
        running the suite.
        """
"""Test runner calls this exactly once before they start running the suite."""
        pass

    def after_suite(self, test_report):
        """
        The test runner calls this exactly once after all tests have
        finished executing. Be sure to reset the behavior back to its
        original state so that it can be run again.
"""Invoke by test runner calls this exactly once after all tests have finished executing.

        Be sure to reset the behavior back to its original state so that it can be run again.
        """
        pass

    def before_test(self, test, test_report):
        """
        Each test will call this before it executes.
        """
        """Each test will call this before it executes."""
        pass

    def after_test(self, test, test_report):
        """
        Each test will call this after it executes.
        """
        """Each test will call this after it executes."""
        pass
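These four no-op methods define the full hook lifecycle. A sketch of the order in which a runner would drive them (a hypothetical loop; the real sequencing lives in the Job class later in this diff):

def drive_suite(hook, tests, report):
    """Run 'tests', invoking 'hook' at each lifecycle point."""
    hook.before_suite(report)
    for test in tests:
        hook.before_test(test, report)
        test(report)
        hook.after_test(test, report)
    hook.after_suite(report)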


class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method
    def __init__(self, logger, test_name, description, base_test_name, hook):
    """DynamicTestCase class."""

    def __init__(  # pylint: disable=too-many-arguments
            self, logger, test_name, description, base_test_name, hook):
        """Initialize DynamicTestCase."""
        testcase.TestCase.__init__(self, logger, "Hook", test_name)
        self.description = description
        self._hook = hook
        self._base_test_name = base_test_name

    def run_dynamic_test(self, test_report):
        """Helper method to run a dynamic test and update the test report."""
        """Provide helper method to run a dynamic test and update the test report."""
        test_report.startTest(self, dynamic=True)
        try:
            self.run_test()
@ -102,11 +90,12 @@ class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method
            test_report.stopTest(self)

    def as_command(self):
        """Provide base method."""
        return "(dynamic test case)"

    @classmethod
    def create_before_test(cls, logger, base_test, hook, *args, **kwargs):
        """Creates a hook dynamic test to be run before an existing test."""
        """Create a hook dynamic test to be run before an existing test."""
        base_test_name = base_test.short_name()
        test_name = cls._make_test_name(base_test_name, hook)
        description = "{} before running '{}'".format(hook.description, base_test_name)
@ -114,7 +103,7 @@ class DynamicTestCase(testcase.TestCase):  # pylint: disable=abstract-method

    @classmethod
    def create_after_test(cls, logger, base_test, hook, *args, **kwargs):
        """Creates a hook dynamic test to be run after an existing test."""
        """Create a hook dynamic test to be run after an existing test."""
        base_test_name = base_test.short_name()
        test_name = cls._make_test_name(base_test_name, hook)
        description = "{} after running '{}'".format(hook.description, base_test_name)

@ -1,7 +1,4 @@
"""
Interface for customizing the behavior of a test fixture by executing a
JavaScript file.
"""
"""Interface for customizing the behavior of a test fixture by executing a JavaScript file."""

from __future__ import absolute_import

@ -16,19 +13,23 @@ class JSHook(interface.Hook):

    REGISTERED_NAME = registry.LEAVE_UNREGISTERED

    def __init__(self, hook_logger, fixture, js_filename, description, shell_options=None):
    def __init__(  # pylint: disable=too-many-arguments
            self, hook_logger, fixture, js_filename, description, shell_options=None):
        """Initialize JSHook."""
        interface.Hook.__init__(self, hook_logger, fixture, description)
        self._js_filename = js_filename
        self._shell_options = shell_options
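The `# pylint: disable=too-many-arguments` comment on the `def` line is the pattern this commit uses wherever a signature legitimately trips a checker: the pragma is scoped to the construct it annotates instead of being disabled project-wide. Pragmas can also bracket a block and be re-enabled afterwards; a small illustration with hypothetical names:

def connect(  # pylint: disable=too-many-arguments
        host, port, user, password, database, timeout):
    """Silence R0913 for this one signature only."""
    # pylint: disable=invalid-name
    db = (host, port, user, password, database, timeout)
    # pylint: enable=invalid-name
    return db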

    def _should_run_after_test(self):  # pylint: disable=no-self-use
        """
        """Provide base callback.

        Callback that can be overridden by subclasses to indicate if the JavaScript file should be
        executed after the current test.
        """
        return True

    def after_test(self, test, test_report):
        """After test execution."""
        if not self._should_run_after_test():
            return

@ -49,6 +50,7 @@ class DataConsistencyHook(JSHook):
    REGISTERED_NAME = registry.LEAVE_UNREGISTERED

    def after_test(self, test, test_report):
        """After test execution."""
        try:
            JSHook.after_test(self, test, test_report)
        except errors.TestFailure as err:
@ -58,23 +60,29 @@ class DataConsistencyHook(JSHook):
class DynamicJSTestCase(interface.DynamicTestCase):
    """A dynamic TestCase that runs a JavaScript file."""

    def __init__(self, logger, test_name, description, base_test_name, hook, js_filename,
                 shell_options=None):
    def __init__(  # pylint: disable=too-many-arguments
            self, logger, test_name, description, base_test_name, hook, js_filename,
            shell_options=None):
        """Initialize DynamicJSTestCase."""
        interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
                                           hook)
        self._js_test = jstest.JSTestCase(logger, js_filename, shell_options=shell_options)

    def override_logger(self, new_logger):
        """Override logger."""
        interface.DynamicTestCase.override_logger(self, new_logger)
        self._js_test.override_logger(new_logger)

    def reset_logger(self):
        """Reset the logger."""
        interface.DynamicTestCase.reset_logger(self)
        self._js_test.reset_logger()

    def configure(self, fixture, *args, **kwargs):  # pylint: disable=unused-argument
        """Configure the fixture."""
        interface.DynamicTestCase.configure(self, fixture, *args, **kwargs)
        self._js_test.configure(fixture, *args, **kwargs)

    def run_test(self):
        """Execute the test."""
        self._js_test.run_test()

@ -1,7 +1,4 @@
"""
Testing hook for verifying members of a replica set have matching
oplogs.
"""
"""Test hook for verifying members of a replica set have matching oplogs."""

from __future__ import absolute_import

@ -10,13 +7,13 @@ import os.path
from . import jsfile


class CheckReplOplogs(jsfile.DataConsistencyHook):
    """
    Checks that local.oplog.rs matches on the primary and secondaries.
    """
class CheckReplOplogs(jsfile.DataConsistencyHook):  # pylint: disable=non-parent-init-called,super-init-not-called
    """Check that local.oplog.rs matches on the primary and secondaries."""

    def __init__(self, hook_logger, fixture, shell_options=None):
    def __init__(  # pylint: disable=super-init-not-called
            self, hook_logger, fixture, shell_options=None):
        """Initialize CheckReplOplogs."""
        description = "Check oplogs of all replica set members"
        js_filename = os.path.join("jstests", "hooks", "run_check_repl_oplogs.js")
        jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
                               shell_options=shell_options)
        jsfile.JSHook.__init__(  # pylint: disable=non-parent-init-called
            self, hook_logger, fixture, js_filename, description, shell_options=shell_options)

@ -1,7 +1,4 @@
"""
Testing hook for verifying correctness of a secondary's behavior during
an unclean shutdown.
"""
"""Test hook for verifying correctness of secondary's behavior during an unclean shutdown."""

from __future__ import absolute_import

@ -20,15 +17,16 @@ from ... import errors


class PeriodicKillSecondaries(interface.Hook):
    """
    Periodically kills the secondaries in a replica set and verifies
    that they can reach the SECONDARY state without having connectivity
    """Periodically kills the secondaries in a replica set.

    Also verifies that the secondaries can reach the SECONDARY state without having connectivity
    to the primary after an unclean shutdown.
    """

    DEFAULT_PERIOD_SECS = 30

    def __init__(self, hook_logger, rs_fixture, period_secs=DEFAULT_PERIOD_SECS):
        """Initialize PeriodicKillSecondaries."""
        if not isinstance(rs_fixture, replicaset.ReplicaSetFixture):
            raise TypeError("{} either does not support replication or does not support writing to"
                            " its oplog early".format(rs_fixture.__class__.__name__))
@ -46,6 +44,7 @@ class PeriodicKillSecondaries(interface.Hook):
        self._last_test = None

    def after_suite(self, test_report):
        """Run after suite."""
        if self._start_time is not None:
            # Ensure that we test killing the secondary and having it reach state SECONDARY after
            # being restarted at least once when running the suite.
@ -54,6 +53,7 @@ class PeriodicKillSecondaries(interface.Hook):
            self._run(test_report)

    def before_test(self, test, test_report):
        """Run before test."""
        if self._start_time is not None:
            # The "rsSyncApplyStop" failpoint is already enabled.
            return
@ -66,6 +66,7 @@ class PeriodicKillSecondaries(interface.Hook):
        self._start_time = time.time()

    def after_test(self, test, test_report):
        """Run after test."""
        self._last_test = test

        # Kill the secondaries and verify that they can reach the SECONDARY state if the specified
@ -116,12 +117,17 @@ class PeriodicKillSecondaries(interface.Hook):


class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
    def __init__(self, logger, test_name, description, base_test_name, hook, test_report):
    """PeriodicKillSecondariesTestCase class."""

    def __init__(  # pylint: disable=too-many-arguments
            self, logger, test_name, description, base_test_name, hook, test_report):
        """Initialize PeriodicKillSecondariesTestCase."""
        interface.DynamicTestCase.__init__(self, logger, test_name, description, base_test_name,
                                           hook)
        self._test_report = test_report

    def run_test(self):
        """Run the test."""
        self._kill_secondaries()
        self._check_secondaries_and_restart_fixture()

@ -143,7 +149,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
        for secondary in self.fixture.get_secondaries():
            # Disable the "rsSyncApplyStop" failpoint on the secondary to have it resume applying
            # oplog entries.
            self._hook._disable_rssyncapplystop(secondary)
            self._hook._disable_rssyncapplystop(secondary)  # pylint: disable=protected-access

            # Wait a little bit for the secondary to start applying oplog entries so that we are more
            # likely to kill the mongod process while it is partway into applying a batch.
@ -229,7 +235,7 @@ class PeriodicKillSecondariesTestCase(interface.DynamicTestCase):
        self.fixture.setup()
        self.fixture.await_ready()

    def _check_invariants_as_standalone(self, secondary):
    def _check_invariants_as_standalone(self, secondary):  # pylint: disable=too-many-branches
        # We remove the --replSet option in order to start the node as a standalone.
        replset_name = secondary.mongod_options.pop("replSet")

@ -1,6 +1,4 @@
"""
Testing hook that periodically makes the primary of a replica set step down.
"""
"""Test hook that periodically makes the primary of a replica set step down."""
from __future__ import absolute_import

import collections
@ -18,15 +16,15 @@ from buildscripts.resmokelib.testing.fixtures import shardedcluster


class ContinuousStepdown(interface.Hook):
    """The ContinuousStepdown hook regularly connects to replica sets and sends a replSetStepDown
    command.
    """
    """Regularly connect to replica sets and send a replSetStepDown command."""

    DESCRIPTION = ("Continuous stepdown (steps down the primary of replica sets at regular"
                   " intervals)")

    def __init__(self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True,
                 stepdown_duration_secs=10, stepdown_interval_ms=8000):
        """Initializes the ContinuousStepdown.
    def __init__(  # pylint: disable=too-many-arguments
            self, hook_logger, fixture, config_stepdown=True, shard_stepdown=True,
            stepdown_duration_secs=10, stepdown_interval_ms=8000):
        """Initialize the ContinuousStepdown.

        Args:
            hook_logger: the logger instance for this hook.
@ -48,6 +46,7 @@ class ContinuousStepdown(interface.Hook):
        self._stepdown_thread = None

    def before_suite(self, test_report):
        """Before suite."""
        if not self._rs_fixtures:
            self._add_fixture(self._fixture)
        self._stepdown_thread = _StepdownThread(self.logger, self._rs_fixtures,
@ -57,15 +56,18 @@ class ContinuousStepdown(interface.Hook):
        self._stepdown_thread.start()

    def after_suite(self, test_report):
        """After suite."""
        self.logger.info("Stopping the stepdown thread.")
        self._stepdown_thread.stop()

    def before_test(self, test, test_report):
        """Before test."""
        self._check_thread()
        self.logger.info("Resuming the stepdown thread.")
        self._stepdown_thread.resume()

    def after_test(self, test, test_report):
        """After test."""
        self._check_thread()
        self.logger.info("Pausing the stepdown thread.")
        self._stepdown_thread.pause()
@ -92,8 +94,11 @@ class ContinuousStepdown(interface.Hook):
            self._add_fixture(fixture.configsvr)


class _StepdownThread(threading.Thread):
class _StepdownThread(threading.Thread):  # pylint: disable=too-many-instance-attributes
    """_StepdownThread class."""

    def __init__(self, logger, rs_fixtures, stepdown_interval_secs, stepdown_duration_secs):
        """Initialize _StepdownThread."""
        threading.Thread.__init__(self, name="StepdownThread")
        self.daemon = True
        self.logger = logger
@ -114,6 +119,7 @@ class _StepdownThread(threading.Thread):
        self._step_up_stats = collections.Counter()

    def run(self):
        """Execute the thread."""
        if not self._rs_fixtures:
            self.logger.warning("No replica set on which to run stepdowns.")
            return
@ -135,7 +141,7 @@ class _StepdownThread(threading.Thread):
            self._wait(wait_secs)

    def stop(self):
        """Stops the thread."""
        """Stop the thread."""
        self._is_stopped_evt.set()
        # Unpause to allow the thread to finish.
        self.resume()
@ -145,7 +151,7 @@ class _StepdownThread(threading.Thread):
        return self._is_stopped_evt.is_set()

    def pause(self):
        """Pauses the thread."""
        """Pause the thread."""
        self._is_resumed_evt.clear()
        # Wait until we are no longer executing stepdowns.
        self._is_idle_evt.wait()
@ -153,7 +159,7 @@ class _StepdownThread(threading.Thread):
        self._await_primaries()

    def resume(self):
        """Resumes the thread."""
        """Resume the thread."""
        self._is_resumed_evt.set()

        self.logger.info(
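The stop/pause/resume trio above is a standard two-Event pattern: one event marks shutdown, the other gates each iteration of run(). A self-contained sketch of that pattern, simplified from the stepdown thread (the real one also issues replSetStepDown commands and tracks idleness with a third event):

import threading
import time

class PausableThread(threading.Thread):
    """Loop until stopped, blocking whenever paused."""

    def __init__(self):
        threading.Thread.__init__(self, name="PausableThread")
        self.daemon = True
        self._is_stopped_evt = threading.Event()
        self._is_resumed_evt = threading.Event()
        self._is_resumed_evt.set()  # start in the running state

    def run(self):
        while not self._is_stopped_evt.is_set():
            self._is_resumed_evt.wait()  # block here while paused
            time.sleep(0.1)  # one unit of work per iteration

    def stop(self):
        self._is_stopped_evt.set()
        self.resume()  # unpause so run() can observe the stop flag

    def pause(self):
        self._is_resumed_evt.clear()

    def resume(self):
        self._is_resumed_evt.set()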

@ -1,7 +1,4 @@
"""
Testing hook for verifying the consistency and integrity of collection
and index data.
"""
"""Test hook for verifying the consistency and integrity of collection and index data."""

from __future__ import absolute_import

@ -11,13 +8,16 @@ from . import jsfile


class ValidateCollections(jsfile.DataConsistencyHook):
    """
    Runs full validation on all collections in all databases on every stand-alone
    """Run full validation.

    This will run on all collections in all databases on every stand-alone
    node, primary replica-set node, or primary shard node.
    """

    def __init__(self, hook_logger, fixture, shell_options=None):
    def __init__(  # pylint: disable=super-init-not-called
            self, hook_logger, fixture, shell_options=None):
        """Initialize ValidateCollections."""
        description = "Full collection validation"
        js_filename = os.path.join("jstests", "hooks", "run_validate_collections.js")
        jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename, description,
                               shell_options=shell_options)
        jsfile.JSHook.__init__(  # pylint: disable=non-parent-init-called
            self, hook_logger, fixture, js_filename, description, shell_options=shell_options)

@ -1,7 +1,4 @@
"""
Enables supports for running tests simultaneously by processing them
from a multi-consumer queue.
"""
"""Enable running tests simultaneously by processing them from a multi-consumer queue."""

from __future__ import absolute_import

@ -13,14 +10,11 @@ from ..utils import queue as _queue


class Job(object):
    """
    Runs tests from a queue.
    """
    """Run tests from a queue."""

    def __init__(self, logger, fixture, hooks, report, archival, suite_options):
        """
        Initializes the job with the specified fixture and hooks.
        """
    def __init__(  # pylint: disable=too-many-arguments
            self, logger, fixture, hooks, report, archival, suite_options):
        """Initialize the job with the specified fixture and hooks."""

        self.logger = logger
        self.fixture = fixture
@ -30,9 +24,7 @@ class Job(object):
        self.suite_options = suite_options

    def __call__(self, queue, interrupt_flag, teardown_flag=None):
        """
        Continuously executes tests from 'queue' and records their
        details in 'report'.
"""Continuously execute tests from 'queue' and records their details in 'report'.

        If 'teardown_flag' is not None, then 'self.fixture.teardown()'
        will be called before this method returns. If an error occurs
@ -47,7 +39,7 @@ class Job(object):
            # Stop running tests immediately.
            self.logger.error("Received a StopExecution exception: %s.", err)
            should_stop = True
        except:
        except:  # pylint: disable=bare-except
            # Unknown error, stop execution.
            self.logger.exception("Encountered an error during test execution.")
            should_stop = True
@ -64,15 +56,12 @@ class Job(object):
            except errors.ServerFailure as err:
                self.logger.warn("Teardown of %s was not successful: %s", self.fixture, err)
                teardown_flag.set()
            except:
            except:  # pylint: disable=bare-except
                self.logger.exception("Encountered an error while tearing down %s.", self.fixture)
                teardown_flag.set()
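Both bare `except:` clauses are kept deliberately, since even non-Exception errors must stop execution or set the teardown flag, so the commit silences W0702 inline rather than narrowing the handler. For comparison, the narrower idiom, which lets SystemExit and KeyboardInterrupt propagate, looks like this (hypothetical function):

def teardown_fixture():
    raise RuntimeError("boom")

try:
    teardown_fixture()
except Exception as err:  # unlike a bare except, KeyboardInterrupt still escapes
    print("Teardown failed: %s" % err)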

    def _run(self, queue, interrupt_flag):
        """
        Calls the before/after suite hooks and continuously executes
        tests from 'queue'.
        """
        """Call the before/after suite hooks and continuously execute tests from 'queue'."""

        for hook in self.hooks:
            hook.before_suite(self.report)
@ -91,9 +80,7 @@ class Job(object):
            hook.after_suite(self.report)

    def _execute_test(self, test):
        """
        Calls the before/after test hooks and executes 'test'.
        """
        """Call the before/after test hooks and execute 'test'."""

        test.configure(self.fixture, config.NUM_CLIENTS_PER_FIXTURE)
        self._run_hooks_before_tests(test)
@ -101,26 +88,26 @@ class Job(object):
        test(self.report)
        try:
            if self.suite_options.fail_fast and not self.report.wasSuccessful():
                self.logger.info("%s failed, so stopping..." % (test.shortDescription()))
                raise errors.StopExecution("%s failed" % (test.shortDescription()))
                self.logger.info("%s failed, so stopping..." % (test.short_description()))
                raise errors.StopExecution("%s failed" % (test.short_description()))

            if not self.fixture.is_running():
                self.logger.error(
                    "%s marked as a failure because the fixture crashed during the test.",
                    test.shortDescription())
                    test.short_description())
                self.report.setFailure(test, return_code=2)
                # Always fail fast if the fixture fails.
                raise errors.StopExecution("%s not running after %s" % (self.fixture,
                                                                        test.shortDescription()))
                                                                        test.short_description()))
        finally:
            success = self.report._find_test_info(test).status == "pass"
            success = self.report.find_test_info(test).status == "pass"
            if self.archival:
                self.archival.archive(self.logger, test, success)

        self._run_hooks_after_tests(test)

    def _run_hook(self, hook, hook_function, test):
        """ Helper to run hook and archival. """
        """Provide helper to run hook and archival."""
        try:
            success = False
            hook_function(test, self.report)
@ -130,8 +117,7 @@ class Job(object):
                self.archival.archive(self.logger, test, success, hook=hook)

    def _run_hooks_before_tests(self, test):
        """
        Runs the before_test method on each of the hooks.
        """Run the before_test method on each of the hooks.

        Swallows any TestFailure exceptions if set to continue on
        failure, and reraises any other exceptions.
@ -145,13 +131,13 @@ class Job(object):

        except errors.ServerFailure:
            self.logger.exception("%s marked as a failure by a hook's before_test.",
                                  test.shortDescription())
                                  test.short_description())
            self._fail_test(test, sys.exc_info(), return_code=2)
            raise errors.StopExecution("A hook's before_test failed")

        except errors.TestFailure:
            self.logger.exception("%s marked as a failure by a hook's before_test.",
                                  test.shortDescription())
                                  test.short_description())
            self._fail_test(test, sys.exc_info(), return_code=1)
            if self.suite_options.fail_fast:
                raise errors.StopExecution("A hook's before_test failed")
@ -164,8 +150,7 @@ class Job(object):
            raise

    def _run_hooks_after_tests(self, test):
        """
        Runs the after_test method on each of the hooks.
        """Run the after_test method on each of the hooks.

        Swallows any TestFailure exceptions if set to continue on
        failure, and reraises any other exceptions.
@ -179,13 +164,13 @@ class Job(object):

        except errors.ServerFailure:
            self.logger.exception("%s marked as a failure by a hook's after_test.",
                                  test.shortDescription())
                                  test.short_description())
            self.report.setFailure(test, return_code=2)
            raise errors.StopExecution("A hook's after_test failed")

        except errors.TestFailure:
            self.logger.exception("%s marked as a failure by a hook's after_test.",
                                  test.shortDescription())
                                  test.short_description())
            self.report.setFailure(test, return_code=1)
            if self.suite_options.fail_fast:
                raise errors.StopExecution("A hook's after_test failed")
@ -195,9 +180,7 @@ class Job(object):
            raise

    def _fail_test(self, test, exc_info, return_code=1):
        """
        Helper to record a test as a failure with the provided return
        code.
        """Provide helper to record a test as a failure with the provided return code.

        This method should not be used if 'test' has already been
        started, instead use TestReport.setFailure().
@ -210,10 +193,9 @@ class Job(object):

    @staticmethod
    def _drain_queue(queue):
        """
        Removes all elements from 'queue' without actually doing
        anything to them. Necessary to unblock the main thread that is
        waiting for 'queue' to be empty.
        """Remove all elements from 'queue' without actually doing anything to them.

        Necessary to unblock the main thread that is waiting for 'queue' to be empty.
        """

        try:
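The draining step matters because the producer side joins the queue: Queue.join() only returns once every get() has been matched by a task_done(), so leftover items must still be consumed when execution stops early. A sketch of the idea against the standard library queue (the exact interface of the resmokelib wrapper is assumed):

import Queue  # the Python 2 name; the module is "queue" on Python 3

def drain(q):
    """Consume and discard everything in 'q', marking each item done."""
    try:
        while True:
            q.get_nowait()
            q.task_done()
    except Queue.Empty:
        pass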