SERVER-76751 Pinning all python deps with poetry
This commit is contained in:
parent dd7927a4ae
commit 13f208e8c4

SConstruct (39 lines changed)
@@ -1515,13 +1515,6 @@ env_vars.Add(
    default='auto',
)

env_vars.Add(
    PathVariable(
        'VALIDATE_ENV_SCRIPT',
        help='''Path of a python script to validate the mongo workspace for common issues.
        An example script is located at buildscripts/validate_env.py
        ''', default=None, validator=PathVariable.PathIsFile))

env_vars.Add(
    'WINDOWS_OPENSSL_BIN',
    help='Sets the path to the openssl binaries for packaging',
@@ -1771,41 +1764,9 @@ else:
    env.FatalError(f"Error setting VERBOSE variable: {e}")
env.AddMethod(lambda env: env['VERBOSE'], 'Verbose')


def CheckDevEnv(context):
    context.Message('Checking if dev env is valid... ')
    context.sconf.cached = 0
    if env.get('VALIDATE_ENV_SCRIPT'):
        proc = subprocess.run(
            [sys.executable, env.File('$VALIDATE_ENV_SCRIPT').get_path()], capture_output=True,
            text=True)
        context.Log(proc.stdout)
        context.Log(proc.stderr)
        context.sconf.lastTarget = Value(proc.stdout + proc.stderr)
        result = proc.returncode == 0
        context.Result(result)
        if env.Verbose():
            print(proc.stdout)
    else:
        context.Result("skipped")
        result = True
    return result


env.Append(
    LINKFLAGS=['${_concat(COMPILER_EXEC_PREFIX_OPT, LINKFLAGS_COMPILER_EXEC_PREFIX, "", __env__)}'])

devenv_check = Configure(
    env,
    help=False,
    custom_tests={
        'CheckDevEnv': CheckDevEnv,
    },
)
if not devenv_check.CheckDevEnv():
    env.ConfError(f"Failed to validate dev env:\n{devenv_check.lastTarget.get_contents().decode()}")
devenv_check.Finish()

# Normalize the ICECC_DEBUG option
try:
    env['ICECC_DEBUG'] = to_boolean(env['ICECC_DEBUG'])
@@ -28,7 +28,7 @@ RUN python -m pip install --upgrade pip wheel
# copy resmoke, make the venv, and pip install
COPY src /resmoke

RUN bash -c "cd /resmoke && python -m venv python3-venv && . python3-venv/bin/activate && pip install --upgrade pip wheel && pip install -r ./buildscripts/requirements.txt"
RUN bash -c "cd /resmoke && python3 -m venv python3-venv && . python3-venv/bin/activate && python3 -m pip install 'poetry==1.5.1' && python3 -m poetry install --no-root --sync"

# copy the mongo binary -- make sure it is executable
COPY mongo /usr/bin
@@ -133,6 +133,7 @@ def main():
        FunctionCall("kill processes"),
        FunctionCall("cleanup environment"),
        FunctionCall("set up venv"),
        FunctionCall("set up libdeps venv"),
        FunctionCall("upload pip requirements"),
        FunctionCall("f_expansions_write"),
        FunctionCall("configure evergreen api credentials"),
@@ -51,7 +51,7 @@ Which will give an output similar to this:

The graph visualizer tool starts up a web service that provides a frontend GUI for navigating and examining the graph files. The visualizer uses a Python Flask backend and a React JavaScript frontend. You will need to install the libdeps Python requirements to run the backend:

python3 -m pip install -r etc/pip/libdeps-requirements.txt
python3 -m poetry install --no-root --sync -E libdeps
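Once the backend requirements are installed, the web service can be launched from the repository root. The script location below is an assumption based on where the libdeps tooling lives in the tree, so consult its help output for the exact options:

    python3 buildscripts/libdeps/graph_visualizer.py --help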
For installing the dependencies for the frontend, you will need node >= 12.0.0 and npm installed and in the PATH. To install the dependencies, navigate to the directory where package.json lives, and run:

@@ -138,7 +138,7 @@ Could not find the correct version of linter '%s', expected '%s'. Check your
PATH environment variable or re-run with --verbose for more information.

To fix, install the needed python modules for Python 3.x:
    python3 -m pip install -r etc/pip/lint-requirements.txt
    python3 -m poetry install --no-root --sync

These commands are typically available via packages with names like python-pip,
or python3-pip. See your OS documentation for help.
@@ -1,3 +0,0 @@
# This file is for the convenience of users who previously relied on buildscripts/requirements.txt.
# For future use, please use etc/pip/dev-requirements.txt instead.
-r ../etc/pip/dev-requirements.txt
@@ -38,7 +38,7 @@ class SetUpEC2Instance(PowercycleCommand):

        # Second operation -
        # Copy buildscripts and mongoDB executables to the remote host.
        files = ["etc", "buildscripts", "dist-test/bin"]
        files = ["etc", "buildscripts", "dist-test/bin", "poetry.lock", "pyproject.toml"]

        shared_libs = "dist-test/lib"
        if os.path.isdir(shared_libs):
@@ -59,7 +59,8 @@ class SetUpEC2Instance(PowercycleCommand):
        cmds = f"{cmds}; $python_loc -m venv --system-site-packages {venv}"
        cmds = f"{cmds}; activate=$(find {venv} -name 'activate')"
        cmds = f"{cmds}; . $activate"
        cmds = f"{cmds}; pip3 install -r $remote_dir/etc/pip/powercycle-requirements.txt"
        cmds = f"{cmds}; python3 -m pip install 'poetry==1.5.1'"
        cmds = f"{cmds}; pushd $remote_dir && python3 -m poetry install --no-root --sync && popd"

        self.remote_op.operation(SSHOperation.SHELL, cmds, retry=True, retry_count=retry_count)
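Put together, the commands composed above run as a single remote shell invocation roughly like the following sketch, where <python_loc>, <venv>, and <remote_dir> stand in for the values filled in at runtime:

    <python_loc> -m venv --system-site-packages <venv>; activate=$(find <venv> -name 'activate'); . $activate; python3 -m pip install 'poetry==1.5.1'; pushd <remote_dir> && python3 -m poetry install --no-root --sync && popd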
@@ -22,7 +22,7 @@ sys.path = [SCONS_DIR, SITE_TOOLS_DIR] + sys.path
from mongo.pip_requirements import verify_requirements, MissingRequirements

try:
    verify_requirements('etc/pip/compile-requirements.txt')
    verify_requirements()
except MissingRequirements as ex:
    print(ex)
    sys.exit(1)
@@ -180,7 +180,15 @@ fi' >> .bash_profile
ln -s ${SRC_DIR}/buildscripts buildscripts

# Install pymongo to get the bson library for pretty-printers.
${TOOLCHAIN_ROOT}/bin/pip3 install -r ${SRC_DIR}/etc/pip/dev-requirements.txt &
# This is copied from evergreen/functions/venv_setup.sh
if [ "Windows_NT" = "$OS" ]; then
  ${TOOLCHAIN_ROOT}/bin/python3 -m pip install "poetry==1.5.1" "charset-normalizer==2.0.12"
else
  ${TOOLCHAIN_ROOT}/bin/python3 -m pip install "poetry==1.5.1"
fi
pushd ${SRC_DIR}
${TOOLCHAIN_ROOT}/bin/python3 -m poetry install --no-root --sync &
popd
else
archive_fail "src"
fi
@@ -1,175 +0,0 @@
#!/usr/bin/env python3
import contextlib
import os
import sys
import re
import urllib.request
from pathlib import Path
import git
import yaml

REQUIREMENTS_PATH = "buildscripts/requirements.txt"
GIT_ORG = "10gen"
ENTERPRISE_PATH = "src/mongo/db/modules/enterprise"
VENV_PATH = "python3-venv/bin/activate"
# alert user if less than 10gb of space left
STORAGE_AMOUNT = 10
# REQUIREMENTS_PATH = "buildscripts/requirements.txt"
LATEST_RELEASES = "https://raw.githubusercontent.com/mongodb/mongo/master/src/mongo/util/version/releases.yml"

# determine the path of the mongo directory
# this assumes the location of this script is in the buildscripts directory
buildscripts_path = os.path.dirname(os.path.realpath(__file__))
mongo_path = os.path.split(buildscripts_path)[0]
sys.path.append(mongo_path)

# pylint: disable=wrong-import-position
from site_scons.mongo.pip_requirements import verify_requirements, MissingRequirements
from buildscripts.resmokelib.utils import evergreen_conn


def check_cwd() -> int:
    print("Checking if current directory is mongo root directory...")
    if os.getcwd() != mongo_path:
        print("ERROR: We do not support building outside of the mongo root directory.")
        return 1

    return 0


# checks if the script is being run inside of a python venv or not
def in_virtualenv() -> bool:
    base_prefix = getattr(sys, "base_prefix", None) or getattr(sys, "real_prefix",
                                                               None) or sys.prefix
    return base_prefix != sys.prefix


def get_git_repo(path: str, repo_name: str) -> git.Repo:
    try:
        return git.Repo(path)
    except git.exc.NoSuchPathError:
        print(f"ERROR: Could not find {repo_name} git repo at {mongo_path}")
        print("Make sure your validate_env.py file is in the buildscripts directory")
        return None


# returns the hash of the most recent commit that is shared between upstream and HEAD
def get_common_hash(repo: git.Repo, repo_name: str) -> str:
    if not repo:
        return None

    upstream_remote = None

    # determine which remote is pointed to the 10gen github repo
    for remote in repo.remotes:
        if remote.url.endswith(f"{GIT_ORG}/{repo_name}.git"):
            upstream_remote = remote
            break

    if upstream_remote is None:
        print("ERROR: Could not find remote for:", f"{GIT_ORG}/{repo_name}")
        return None

    upstream_remote.fetch("master")
    common_hash = repo.merge_base("HEAD", f"{upstream_remote.name}/master")

    if not common_hash or len(common_hash) == 0:
        print(f"ERROR: Could not find common hash for {repo_name}")
        return None

    return common_hash[0]


def check_git_repos() -> int:
    print("Checking if mongo repo and enterprise module repo are in sync...")
    mongo_repo = get_git_repo(mongo_path, "mongo")
    mongo_hash = get_common_hash(mongo_repo, "mongo")
    enterprise_dir = os.path.join(mongo_path, ENTERPRISE_PATH)
    enterprise_repo = get_git_repo(enterprise_dir, "mongo-enterprise-modules")
    enterprise_hash = get_common_hash(enterprise_repo, "mongo-enterprise-modules")

    if not mongo_hash or not enterprise_hash:
        return 1

    evg_api = evergreen_conn.get_evergreen_api(Path.home() / '.evergreen.yml')
    manifest = evg_api.manifest("mongodb-mongo-master", mongo_hash)
    modules = manifest.modules
    if "enterprise" in modules and str(enterprise_hash) != modules["enterprise"].revision:
        synced_enterprise_hash = modules["enterprise"].revision
        print("Error: the mongo repo and enterprise module repo are out of sync")
        print(
            f"Try `git fetch; git rebase --onto {synced_enterprise_hash}` in the enterprise repo directory"
        )
        print(f"Your enterprise repo directory is {enterprise_dir}")
        return 1

    # Check if the git tag is out of date
    # https://mongodb.stackenterprise.co/questions/145
    print("Checking if your mongo repo git tag is up to date...")
    releases_page = urllib.request.urlopen(LATEST_RELEASES)
    page_bytes = releases_page.read()
    text = page_bytes.decode("utf-8")
    parsed_text = yaml.safe_load(text)
    compat_versions = parsed_text['featureCompatibilityVersions']
    if not compat_versions or len(compat_versions) <= 1:
        print(
            "ERROR: Something went wrong, there are not at least two feature compatibility mongo versions"
        )
        return 1
    else:
        # Hard coded to the second-to-last version because the last version should be the test version
        target_version = compat_versions[-2]
        local_version = mongo_repo.git.describe()
        # get the version info we want out of git describe
        trimmed_local_version = re.search("([0-9]+\\.[0-9]+)(\\.[0-9])?", local_version).group(1)
        if trimmed_local_version != target_version:
            print(
                "ERROR: Your git tag is out of date, run `git config remote.origin.tagOpt '--tags'; git fetch origin master`"
            )
            return 1

    return 0


# check for missing dependencies
def check_dependencies() -> int:
    print("Checking for missing dependencies...")
    requirements_file_path = os.path.join(mongo_path, REQUIREMENTS_PATH)
    try:
        with contextlib.redirect_stdout(None):
            verify_requirements(requirements_file_path)
    except MissingRequirements as ex:
        print(ex)
        print(
            f"ERROR: Found missing dependencies, run `python -m pip install -r {REQUIREMENTS_PATH}`"
        )
        if not in_virtualenv():
            print(
                "WARNING: you are not in a python venv, we recommend using one to handle your requirements."
            )
        return 1

    return 0


def check_space() -> int:
    print("Checking if there is enough disk space to build...")
    # Get the filesystem information where the mongo directory lies
    statvfs = os.statvfs(mongo_path)
    free_bytes = statvfs.f_frsize * statvfs.f_bfree
    free_gb = (free_bytes // 1000) / 1000

    # Warn if there is a low amount of space left in the filesystem
    if free_gb < STORAGE_AMOUNT:
        print(f"WARNING: only {free_gb}GB of space left in filesystem")


def main() -> int:
    if any([check_cwd(), check_git_repos(), check_dependencies(), check_space()]):
        exit(1)


if __name__ == '__main__':
    main()

# More requirements can be added as new issues appear
@@ -229,7 +229,7 @@ To use the post-build tools, you must first build the libdeps dependency graph b
You must also install the requirements file:

```
python3 -m pip install -r etc/pip/libdeps-requirements.txt
python3 -m poetry install --no-root --sync -E libdeps
```

After the graph file is created, it can be used as input into the `gacli` tool to perform linting and analysis on the complete dependency graph. The `gacli` tool has options for what types of analysis to perform. A complete list can be found using the `--help` option. Minimally, you can run the `gacli` tool by just passing the graph file you wish to analyze:
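For reference, a minimal invocation looks something like the sketch below; the script path and graph-file location are assumptions based on the libdeps tooling layout and a typical build output directory, so adjust them to your build:

```
python3 buildscripts/libdeps/gacli.py --graph-file build/opt/libdeps/libdeps.graphml
```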
@@ -294,7 +294,7 @@ The libdeps linter also has the `--libdeps-linting=print` option which will perf
The dependency graph can be analyzed post-build by leveraging the completeness of the graph to perform more extensive analysis. You will need to install the libdeps Python requirements before using the post-build analysis tools:

```
python3 -m pip install -r etc/pip/libdeps-requirements.txt
python3 -m poetry install --no-root --sync -E libdeps
```

The command line interface tool (gacli) has a comprehensive help text which describes the available analysis options and interface. The visualizer tool includes a GUI which displays the available analysis options graphically. These tools are briefly covered in the following sections.
@@ -48,12 +48,16 @@ modules must be installed. Python 3 is included in macOS 10.15 and later.
For earlier macOS versions, Python 3 can be installed using Homebrew or
MacPorts or similar.

To install the required Python modules, run:

    $ python3 -m pip install -r etc/pip/compile-requirements.txt
MongoDB manages its Python dependencies with poetry.
You can find other installation instructions for poetry in this [install guide](https://python-poetry.org/).

Installing the requirements inside a python3 based virtualenv
dedicated to building MongoDB is recommended.
dedicated to building MongoDB is optional but recommended.

    $ python3 -m venv <venv_path> --prompt mongo  # Optional (venv_path can be a path of your choice)
    $ source <venv_path>/bin/activate  # Optional (might be slightly different based on your shell)
    $ python3 -m pip install poetry
    $ python3 -m poetry install --no-root --sync

Note: In order to compile C-based Python modules, you'll also need the
Python and OpenSSL C headers. Run:
@@ -62,6 +66,9 @@ Python and OpenSSL C headers. Run:
* Ubuntu (20.04 and newer)/Debian (Bullseye and newer) - `apt install python-dev-is-python3 libssl-dev`
* Ubuntu (18.04 and older)/Debian (Buster and older) - `apt install python3.7-dev libssl-dev`

Note: If you are seeing errors involving "Prompt dismissed.." you might need to run the following command before running `poetry install`.

    $ export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring

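After the install completes, a dry-run sync is a quick way to confirm the environment matches the lock file; this mirrors the check the build scripts in this commit perform, and a clean environment reports zero installs, updates, and removals:

    $ python3 -m poetry install --no-root --sync --dry-run
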
SCons
---------------
@@ -2836,7 +2836,6 @@ buildvariants:
  - name: validate_commit_message
  - name: lint_large_files_check
  - name: check_feature_flag_tags
  - name: compile_venv_deps_check
  - name: resmoke_validation_tests
  - name: version_gen_validation
    distros:
@@ -839,8 +839,16 @@ functions:
      binary: bash
      args:
        - "src/evergreen/functions/venv_setup.sh"
      env:
        pip_dir: ${pip_dir}

  # This needs to be run after "set up venv"
  # This depends on having a venv already set up
  # This just installs the extra deps needed for libdeps
  "set up libdeps venv": &set_up_libdeps_venv
    command: subprocess.exec
    params:
      binary: bash
      args:
        - "src/evergreen/libdeps_setup.sh"

  "upload pip requirements": &upload_pip_requirements
    command: s3.put
@@ -2699,21 +2707,6 @@ tasks:
      content_type: application/tar
      display_name: Benchmarks

- name: compile_venv_deps_check
  commands:
    - *f_expansions_write
    - command: manifest.load
    - func: "git get project and add git tag"
    - *f_expansions_write
    - *kill_processes
    - *cleanup_environment
    - command: subprocess.exec
      type: test
      params:
        binary: bash
        args:
          - "src/evergreen/compile_venv_dependency_check.sh"

- name: determine_patch_tests
  commands:
    - *f_expansions_write
@@ -2776,7 +2769,6 @@ tasks:
      - "./etc/evergreen_nightly.yml"
      - "./etc/evergreen.yml"
      - "./etc/evergreen_yml_components/**"
      - "./etc/pip/**"
      - "./etc/repo_config.yaml"
      - "./etc/scons/**"
      - "buildscripts/**"
@@ -2794,6 +2786,8 @@ tasks:
      - "src/third_party/JSON-Schema-Test-Suite/tests/draft4/**"
      - "src/third_party/mock_ocsp_responder/**"
      - "src/third_party/schemastore.org/**"
      - "poetry.lock"
      - "pyproject.toml"
    exclude_files:
      - "*_test.pdb"

@@ -5502,11 +5496,6 @@ tasks:
    - *f_expansions_write
    - func: "do setup"
    - func: "f_expansions_write"
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - "src/evergreen/external_auth_pip.sh"
    - func: "run tests"
      vars:
        resmoke_jobs_max: ${external_auth_jobs_max|4}
@@ -5525,11 +5514,6 @@ tasks:
        silent: true
        args:
          - "src/evergreen/external_auth_aws_setup.sh"
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - "src/evergreen/external_auth_aws_pip.sh"
    - func: "run tests"

- <<: *task_template
@@ -5539,11 +5523,6 @@ tasks:
    - *f_expansions_write
    - func: "do setup"
    - func: "f_expansions_write"
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - "src/evergreen/external_auth_pip.sh"
    - command: subprocess.exec
      params:
        binary: bash
@@ -1,51 +0,0 @@
## On requirements (`*-requirements.txt`) files

MongoDB requires multiple pypa projects to be installed to build and test. To that end, we provide our own
`*-requirements.txt` files for specific domains of use. Inside each requirements file, there are
only include statements for component files. These files are the bare requirements for specific
components of our python environment. This separation allows us to avoid repetition and conflict in
our requirements across components.

For most developers, if you pip-install `dev-requirements.txt`, you have the python requirements to
lint, build, and test MongoDB.

## How to modify a pypa project requirement in a component

The most common edit of our requirements is likely a change to the constraints on a pypa project
that we already use. For example, say that we currently require `pymongo >= 3.0, < 3.6.0` in the
component `core`. You would like to use PyMongo 3.7, so you instead modify the line in
`etc/pip/components/core.req` to read `pymongo >= 3.0, != 3.6.0`, as illustrated below.
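That change amounts to a one-line edit of the component file; a sketch of making it from the shell, using the exact before/after constraints from the example above, would be:

```
$ # replace the pymongo constraint in the core component
$ sed -i 's/pymongo >= 3.0, < 3.6.0/pymongo >= 3.0, != 3.6.0/' etc/pip/components/core.req
```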
## How to add a new component (`*.req`) file

Occasionally, we will require a set of pypa projects for an entirely new piece of software in our
repository. This usually implies adding a new component file. For example, say that we need to add
a logging system to both local development and evergreen. This system requires the fictional pypa
project `FooLog`. So we add a file `foolog.req` and require it from both `dev-requirements.txt` and
`evgtest-requirements.txt`. Like the majority of our components, we want it in the toolchain, so we
also add it to `toolchain-requirements.txt`. The workflow will usually look like:

```
$ # Make the component file
$ echo "FooLog" >etc/pip/components/foolog.req
$ # Require the component from the requirements files
$ echo "-r components/foolog.req" >>etc/pip/dev-requirements.txt
$ echo "-r components/foolog.req" >>etc/pip/evgtest-requirements.txt
$ echo "-r components/foolog.req" >>etc/pip/toolchain-requirements.txt
```

## How to add a new requirements (`*-requirements.txt`) file

Rarely, we will have an entirely new domain of requirements that is useful. In this case, we need to
at least make a new requirements file. For example, say we want to make a requirements file for
packaging our code. We would need most of the requirements from `dev-requirements.txt`, but the
testing has already been done in our continuous integration. So we create a new file
`package-requirements.txt` and require a smaller subset of components. The new file at
`etc/pip/package-requirements.txt` would look like this:
```
-r components/platform.req
-r components/core.req

-r components/compile.req
-r components/lint.req
```
@@ -1,6 +0,0 @@
-r components/platform.req
-r components/core.req

-r components/compile.req

-r components/tooling_metrics.req
@@ -1,2 +0,0 @@
boto3 <= 1.20.26
botocore <= 1.23.26
@@ -1,5 +0,0 @@
psutil
jsonschema
memory_profiler
puremagic
tabulate
@@ -1,6 +0,0 @@
# Mongo compile
Cheetah3 <= 3.2.6.post1 # src/mongo/base/generate_error_codes.py
packaging <= 21.3
regex <= 2021.11.10
requirements_parser <= 0.3.1
setuptools
@@ -1,7 +0,0 @@
# Core (we need these for most buildscripts)
psutil <= 5.8.0
pymongo == 4.3.3
PyYAML == 5.3.1 # Pinned due to SERVER-79126 and linked BF
types-PyYAML ~= 6.0.12.10
requests >= 2.0.0, <= 2.26.0
typing-extensions >= 3.7.4
@@ -1,6 +0,0 @@
click ~= 7.0
dataclasses; python_version < "3.7"
inject ~= 4.3.1
GitPython ~= 3.1.7
pydantic ~= 1.8.2
structlog ~= 23.1.0
@@ -1,10 +0,0 @@
# These are the dependencies of ldaptor
passlib == 1.7.4
pyOpenSSL == 19.0.0; platform_machine == "s390x" or platform_machine == "ppc64le" # Needed for pinned cryptography package - see SERVER-70845
pyOpenSSL == 22.0.0; platform_machine != "s390x" and platform_machine != "ppc64le"
pyparsing == 2.4.0
service_identity == 18.1.0
twisted == 21.2.0
zope.interface == 5.0.0

ldaptor == 19.0.0
@@ -1,2 +0,0 @@
unittest-xml-reporting >= 2.2.0, <= 3.0.4
packaging <= 21.3
@@ -1,5 +0,0 @@
# TIG jira integration
jira <= 3.1.1
requests-oauth <= 0.4.1
PyJWT <= 2.3.0 # https://github.com/pycontribs/jira/issues/247
# We are omitting pycrypto based on https://github.com/pycontribs/jira/pull/629
@@ -1,9 +0,0 @@
networkx
flask
flask_cors
lxml
eventlet
gevent
progressbar2
cxxfilt
pympler
@@ -1,13 +0,0 @@
# Linters
# Note: These versions are checked by python modules in buildscripts/linter/
GitPython ~= 3.1.31
mypy ~= 1.3.0
pydocstyle == 6.3.0
pylint == 2.17.4
structlog ~= 23.1.0
yamllint == 1.32.0
yapf == 0.26.0 # latest is 0.40.1, but that causes CI failures
types-setuptools == 57.4.12 # latest is 68.0.0.0, but that causes pip install requirements to fail
types-requests == 2.31.0.1
tqdm
colorama
@@ -1,9 +0,0 @@
# Platform-specific components
pypiwin32>=223; sys_platform == "win32" and python_version > "3"
pywin32>=225; sys_platform == "win32" and python_version > "3"

cryptography == 2.3; platform_machine == "s390x" or platform_machine == "ppc64le" # Needed for oauthlib to use RSAAlgorithm # Version locked - see SERVER-36618
cryptography == 36.0.2; platform_machine != "s390x" and platform_machine != "ppc64le"

mongo-ninja-python == 1.11.1.5; (platform_machine == "x86_64" or platform_machine == "aarch64") and sys_platform == "linux"
ninja >= 1.10.0; (platform_machine != "x86_64" and platform_machine != "aarch64") or sys_platform != "linux"
@@ -1,34 +0,0 @@
curatorbin == 1.2.3
PyKMIP == 0.10.0
evergreen.py == 3.4.4
jinja2 <= 2.11.3
MarkupSafe == 1.1.0 # See SERVER-57036, this is a transitive dependency of jinja2
mock <= 4.0.3
shrub.py == 1.1.4
ocspresponder == 0.5.0
flask == 1.1.1
itsdangerous == 2.0.0
ocspbuilder == 0.10.2
Werkzeug == 2.0.3
blackduck == 1.0.1
PyGithub == 1.53
urllib3 >= 1.26.0, <= 1.26.7
distro == 1.5.0
dnspython == 2.1.0
proxy-protocol == 0.7.5
pkce == 1.0.3
oauthlib == 3.1.1
requests-oauthlib == 1.3.0
packaging <= 21.3
docker
mongomock == 4.1.2
pyjwt
selenium
geckodriver-autoinstaller
pipx==1.2.0
pigz-python
opentelemetry-api
opentelemetry-sdk
# TODO: EVG-20576
# Once the above is merged we can get rid of our dependency on grpc
opentelemetry-exporter-otlp-proto-grpc; platform_machine != "darwin" # TODO: SERVER-80336 this should work on macosx
@@ -1 +0,0 @@
mongo-tooling-metrics == 1.0.8
@@ -1,2 +0,0 @@
-r components/platform.req
-r components/core.req
@@ -1,12 +0,0 @@
-r components/platform.req
-r components/core.req

-r components/compile.req
-r components/lint.req
-r components/testing.req
-r components/external_auth.req
-r components/evergreen.req
-r components/aws.req
-r components/jiraclient.req
-r components/build_metrics.req
-r components/tooling_metrics.req
@@ -1,7 +0,0 @@
-r components/platform.req
-r components/core.req

-r components/testing.req
-r components/aws.req
-r components/idl.req
-r components/evergreen.req
@@ -1 +0,0 @@
-r components/external_auth.req
@@ -1,4 +0,0 @@
-r components/platform.req
-r components/core.req

-r components/jiraclient.req
@@ -1 +0,0 @@
-r components/libdeps.req
@@ -1,4 +0,0 @@
-r components/platform.req
-r components/core.req

-r components/lint.req
@@ -1,7 +0,0 @@
-r components/platform.req
-r components/core.req
-r components/evergreen.req
-r components/testing.req

-r components/aws.req
-r components/tooling_metrics.req
@@ -1,21 +0,0 @@
# This file is specifically intended to pull in *all* of our
# requirements.

-r components/core.req

-r components/compile.req
-r components/evergreen.req
-r components/lint.req
-r components/testing.req
-r components/idl.req

-r components/aws.req
-r components/external_auth.req
-r components/jiraclient.req

-r components/platform.req

-r components/build_metrics.req
-r components/libdeps.req

-r components/tooling_metrics.req
@@ -293,10 +293,13 @@ functions:

          mkdir -p mongodb/bin

          /opt/mongodbtoolchain/v4/bin/virtualenv --python /opt/mongodbtoolchain/v4/bin/python3 "${workdir}/compile_venv"
          # This is copied from evergreen/functions/venv_setup.sh
          # Consider consolidating this in the future
          /opt/mongodbtoolchain/v4/bin/python3 -m venv "${workdir}/compile_venv"
          source "${workdir}/compile_venv/bin/activate"

          python -m pip install -r etc/pip/compile-requirements.txt
          python -m pip install "poetry==1.5.1"
          python -m poetry install --no-root --sync
      - command: expansions.write
        params:
          file: expansions.yml
@@ -1,12 +0,0 @@
# Quick check to ensure all scons.py dependencies have been added to compile-requirements.txt
set -o errexit
set -o verbose

# Create virtual env
/opt/mongodbtoolchain/v4/bin/virtualenv --python /opt/mongodbtoolchain/v4/bin/python3 ./compile_venv
source ./compile_venv/bin/activate

# Try printing scons.py help message
cd src
python -m pip install -r etc/pip/compile-requirements.txt
buildscripts/scons.py --help
@@ -1,19 +0,0 @@
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
. "$DIR/prelude.sh"

set -o errexit
set -o verbose
activate_venv

# Not all git get project calls clone into ${workdir}/src so we allow
# callers to tell us where the pip requirements files are.
pip_dir="${pip_dir}"
if [[ -z $pip_dir ]]; then
  # Default to most common location
  pip_dir="${workdir}/src/etc/pip"
fi

# Same as above we have to use quotes to preserve the
# Windows path separator
external_auth_txt="$pip_dir/components/aws.req"
python -m pip install -r "$external_auth_txt"
@@ -1,19 +0,0 @@
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
. "$DIR/prelude.sh"

set -o errexit
set -o verbose
activate_venv

# Not all git get project calls clone into ${workdir}/src so we allow
# callers to tell us where the pip requirements files are.
pip_dir="${pip_dir}"
if [[ -z $pip_dir ]]; then
  # Default to most common location
  pip_dir="${workdir}/src/etc/pip"
fi

# Same as above we have to use quotes to preserve the
# Windows path separator
external_auth_txt="$pip_dir/external-auth-requirements.txt"
python -m pip install -r "$external_auth_txt"
@@ -6,6 +6,8 @@ evergreen_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)/..
. "$evergreen_dir/prelude_python.sh"

python_loc=$(which ${python})
echo "python_loc set to $python_loc"

venv_dir="${workdir}/venv"
if [ -d "$venv_dir" ]; then
  exit 0
@@ -54,18 +56,6 @@ fi

export VIRTUAL_ENV_DISABLE_PROMPT=yes

# Not all git get project calls clone into ${workdir}/src so we allow
# callers to tell us where the pip requirements files are.
pip_dir="${pip_dir}"
if [[ -z $pip_dir ]]; then
  # Default to most common location
  pip_dir="${workdir}/src/etc/pip"
fi

# Same as above we have to use quotes to preserve the
# Windows path separator
toolchain_txt="$pip_dir/toolchain-requirements.txt"

# the whole prelude cannot be imported because it requires pyyaml to be
# installed, which happens just below.
. "$evergreen_dir/prelude_venv.sh"
@@ -78,6 +68,7 @@ echo "Upgrading pip to 21.0.1"
# By retrying we would like to only see errors that happen consistently
for i in {1..5}; do
  python -m pip --disable-pip-version-check install "pip==21.0.1" "wheel==0.37.0" && RET=0 && break || RET=$? && sleep 1
  echo "Python failed to install pip and wheel, retrying..."
done

if [ $RET -ne 0 ]; then
@@ -85,16 +76,44 @@ if [ $RET -ne 0 ]; then
  exit $RET
fi

# Loop 5 times to retry the poetry install
# We have seen weird network errors that can sometimes mess up the pip install
# By retrying we would like to only see errors that happen consistently
for i in {1..5}; do
  if [ "Windows_NT" = "$OS" ]; then
    # Windows has a bug where reinstalling charset-normalizer fails since it is being used at the same time it is being uninstalled
    # We need to install the same version that is pinned in poetry
    python -m pip install "poetry==1.5.1" "charset-normalizer==2.0.12" && RET=0 && break || RET=$? && sleep 1
  elif uname -a | grep -q 's390x\|ppc64le'; then
    # s390x and ppc64le both require these old versions for some reason
    # They are pinned deps as well
    python -m pip install "poetry==1.5.1" "cryptography==2.3" "pyOpenSSL==19.0.0" && RET=0 && break || RET=$? && sleep 1
  else
    python -m pip install "poetry==1.5.1" && RET=0 && break || RET=$? && sleep 1
  fi
  echo "Python failed to install poetry, retrying..."
done

if [ $RET -ne 0 ]; then
  echo "Pip install error for poetry"
  exit $RET
fi

cd src

# Loop 5 times to retry full venv install
# We have seen weird network errors that can sometimes mess up the pip install
# By retrying we would like to only see errors that happen consistently
for i in {1..5}; do
  python -m pip --disable-pip-version-check install -r "$toolchain_txt" -q --log install.log && RET=0 && break || RET=$? && sleep 1
  python -m poetry install --no-root --sync && RET=0 && break || RET=$? && sleep 1
  echo "Python failed to install required deps with poetry, retrying..."
done

if [ $RET -ne 0 ]; then
  echo "Pip install error for full venv: $toolchain_txt"
  cat install.log || true
  echo "Poetry install error for full venv"
  exit $RET
fi

cd ..

python -m pip freeze > pip-requirements.txt
@@ -4,4 +4,20 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
cd src

activate_venv
python -m pip install -r etc/pip/libdeps-requirements.txt

# Loop 5 times to retry libdeps install
# We have seen weird network errors that can sometimes mess up the pip install
# By retrying we would like to only see errors that happen consistently
for i in {1..5}; do
  python -m poetry install --no-root --sync -E libdeps && RET=0 && break || RET=$? && sleep 1
done

if [ $RET -ne 0 ]; then
  echo "Poetry install error for libdeps addition to venv"
  exit $RET
fi

cd ..

# Overwrite pip-requirements since this is installing additional requirements
python -m pip freeze > pip-requirements.txt
@@ -18,6 +18,7 @@ function activate_venv {
      exit 1
    fi
    python=${python}
    echo "Could not find venv. Setting python to $python."
  fi

  if [ "Windows_NT" = "$OS" ]; then
@@ -42,5 +43,5 @@ function activate_venv {
    python -c "import sys; print(sys.path)"
  fi

  echo "python set to $(which $python)"
  echo "python set to $(which $python) and python version: $($python --version)"
}
poetry.lock (generated, new file, 4214 lines)
File diff suppressed because it is too large

pyproject.toml (new file, 171 lines)
@@ -0,0 +1,171 @@
[tool.poetry]
name = "mdb-python-deps"
version = "1.0.0"
description = "MongoDB python dependencies"
authors = ["MongoDB"]
readme = "README.md"
# These packages are included as a TODO if we want to run resmoke with pex
# Currently this does nothing
packages = [
    {include = "buildscripts/resmoke.py"},
    {include = "buildscripts/mongosymb.py"},
    {include = "buildscripts/build_system_options.py"},
    {include = "buildscripts/resmokelib"},
    {include = "buildscripts/idl"},
    {include = "buildscripts/util"}
]

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
# OPTIONAL
# Libdeps dependencies
networkx = "*" # This is also used as a part of build_metrics so it is sort of required
flask = { version = "*", optional = true }
flask-cors = { version = "*", optional = true }
lxml = { version = "*", optional = true }
eventlet = { version = "*", optional = true }
gevent = { version = "*", optional = true }
progressbar2 = { version = "*", optional = true }
cxxfilt = { version = "*", optional = true }
pympler = { version = "*", optional = true }

[tool.poetry.group.aws.dependencies]
boto3 = "<=1.20.26"
botocore = "<=1.23.26"

[tool.poetry.group.build-metrics.dependencies]
jsonschema = "^4.17.3"
psutil = "^5.8.0"
memory-profiler = "^0.61.0"
puremagic = "^1.15"
tabulate = "^0.9.0"

[tool.poetry.group.compile.dependencies]
cheetah3 = "<=3.2.6.post1" # src/mongo/base/generate_error_codes.py
packaging = "<=21.3"
regex = "<=2021.11.10"
requirements_parser = "<=0.3.1"
setuptools = "58.1.0"

[tool.poetry.group.core.dependencies]
psutil = "5.8.0"
pymongo = "4.3.3"
PyYAML = "5.3.1"
types-PyYAML = "~6.0.12.10"
requests = "2.26.0"
typing-extensions = ">3.7.4"

[tool.poetry.group.export.dependencies]
pipx = "1.2.0"
# TODO: Add in pex as we move forward with this
# pex = "^2.1.137"
poetry = "*"

[tool.poetry.group.evergreen.dependencies]
click = "^7.1.2"
inject = "~4.3.1"
GitPython = "~3.1.7"
pydantic = "~1.8.2"
structlog = "~23.1.0"

[tool.poetry.group.external-auth.dependencies]
# These are the dependencies of ldaptor
passlib = "1.7.4"
pyOpenSSL = [
    { version = "19.0.0", markers = "platform_machine == 's390x' or platform_machine == 'ppc64le'" }, # Needed for pinned cryptography package - see SERVER-70845
    { version = "22.0.0", markers = "platform_machine != 's390x' and platform_machine != 'ppc64le'" },
]
pyparsing = "2.4.0"
service_identity = "18.1.0"
twisted = "21.2.0"
'zope.interface' = "5.0.0"
ldaptor = "19.0.0"

[tool.poetry.group.idl.dependencies]
unittest-xml-reporting = "3.0.4"
packaging = "21.3"

[tool.poetry.group.jira-client.dependencies]
jira = "<=3.1.1"
requests-oauth = "<=0.4.1"
PyJWT = "<=2.3.0" # https://github.com/pycontribs/jira/issues/247
# We are omitting pycrypto based on https://github.com/pycontribs/jira/pull/629

[tool.poetry.group.lint.dependencies]
# Note: These versions are checked by python modules in buildscripts/linter/
GitPython = "~3.1.7"
mypy = "~1.3.0"
pydocstyle = "6.3.0"
pylint = "2.17.4"
structlog = "~23.1.0"
yamllint = "1.32.0"
yapf = "0.26.0" # latest is 0.40.1, but that causes CI failures
types-setuptools = "57.4.12" # latest is 68.0.0.0, but that causes pip install requirements to fail
types-requests = "2.31.0.1"
tqdm = "*"
colorama = "0.4.6"

[tool.poetry.group.platform.dependencies]
pypiwin32 = {version = ">=223", markers = "platform_machine == 'win32'"}
pywin32 = {version = ">=225", markers = "platform_machine == 'win32'"}
cryptography = [
    { version = "2.3", markers = "platform_machine == 's390x' or platform_machine == 'ppc64le'" }, # Needed for oauthlib to use RSAAlgorithm # Version locked - see SERVER-36618
    { version = "36.0.2", markers = "platform_machine != 's390x' and platform_machine != 'ppc64le'" },
]
mongo-ninja-python = [
    { version = "1.11.1.5", markers = "sys_platform == 'linux'" },
]
ninja = [
    { version = ">=1.10.0", markers = "sys_platform != 'linux'" },
]

[tool.poetry.group.testing.dependencies]
curatorbin = "1.2.3"
PyKMIP = "0.10.0"
evergreen-py = "3.4.4"
jinja2 = "2.11.3"
MarkupSafe = "1.1.0" # See SERVER-57036, this is a transitive dependency of jinja2
mock = "4.0.3"
shrub-py = "1.1.4"
ocspresponder = "0.5.0"
flask = "1.1.1"
itsdangerous = "2.0.0"
ocspbuilder = "0.10.2"
Werkzeug = "2.0.3"
blackduck = "1.0.1"
PyGithub = "1.53"
urllib3 = "1.26.7"
distro = "1.5.0"
dnspython = "2.1.0"
proxy-protocol = "0.7.5"
pkce = "1.0.3"
oauthlib = "3.1.1"
requests-oauthlib = "1.3.0"
packaging = "21.3"
docker = "^6.1.2"
mongomock = "4.1.2"
pyjwt = "<=2.3.0"
selenium = "^4.9.1"
geckodriver-autoinstaller = "^0.1.0"
pigz-python = "*"
opentelemetry-api = "*"
opentelemetry-sdk = "*"
# TODO: EVG-20576
# Once the above is merged we can get rid of our dependency on grpc
opentelemetry-exporter-otlp-proto-grpc = [ { version = "*", markers = "platform_machine != 'darwin'" } ] # TODO: SERVER-80336 this should work on macosx

[tool.poetry.group.tooling-metrics.dependencies]
mongo-tooling-metrics = "1.0.8"

# This can be installed with "poetry install -E libdeps"
[tool.poetry.extras]
libdeps = ["flask", "flask-cors", "lxml", "eventlet", "gevent", "progressbar2", "cxxfilt", "pympler"]

# This entrypoint is included as a TODO if we want to run resmoke with pex
# Currently this does nothing
[tool.poetry.scripts]
resmoke = "buildscripts.resmoke:entrypoint"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
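The optional libdeps packages declared under [tool.poetry.extras] above are pulled into an existing environment with the same command the build scripts use elsewhere in this commit:

    python3 -m poetry install --no-root --sync -E libdeps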
@@ -5,6 +5,8 @@
# should be used for finding such external modules or
# missing dependencies.
import sys
import subprocess
import re


class MissingRequirements(Exception):
@@ -12,7 +14,7 @@ class MissingRequirements(Exception):
    pass


def verify_requirements(requirements_file: str, silent: bool = False, executable=sys.executable):
def verify_requirements(silent: bool = False, executable=sys.executable):
    """Check if the modules in a pip requirements file are installed.
    This allows for a more friendly user message with guidance on how to
    resolve the missing dependencies.
@@ -32,40 +34,36 @@ def verify_requirements(requirements_file: str, silent: bool = False, executable
            f"Try running:\n"
            f"    {executable} -m pip install {pip_pkg}") from ex

    # Import the prerequisites for this function, providing hints on failure.
    # Import poetry. If this fails then we know the next function will fail.
    # This is so the user will have an easier time diagnosing the problem
    try:
        import requirements
        import poetry
    except ModuleNotFoundError as ex:
        raiseSuggestion(ex, "requirements_parser")

    try:
        import pkg_resources
    except ModuleNotFoundError as ex:
        raiseSuggestion(ex, "setuptools")
        raiseSuggestion(ex, "'poetry==1.5.1'")

    verbose("Checking required python packages...")

    # Reduce a pip requirements file to its PEP 508 requirement specifiers.
    with open(requirements_file) as fd:
        pip_lines = [p.line for p in requirements.parse(fd)]

    # The PEP 508 requirement specifiers can be parsed by the `pkg_resources`.
    pkg_requirements = list(pkg_resources.parse_requirements(pip_lines))
    poetry_dry_run_proc = subprocess.run(
        [executable, "-m", "poetry", "install", "--no-root", "--sync", "--dry-run"], check=True,
        capture_output=True, text=True)

    # String match should look like the following
    # Package operations: 2 installs, 3 updates, 0 removals, 165 skipped
    match = re.search(r"Package operations: (\d+) \w+, (\d+) \w+, (\d+) \w+, (\d+) \w+",
                      poetry_dry_run_proc.stdout)
    verbose("Requirements list:")
    for req in sorted(set([str(req) for req in pkg_requirements])):
        verbose(f"  {req}")

    # Resolve all the requirements at once.
    # This should help expose dependency hell among the requirements.
    try:
        dists = pkg_resources.working_set.resolve(pkg_requirements)
    except pkg_resources.ResolutionError as ex:
        raiseSuggestion(
            ex,
            f"-r {requirements_file}",
        )

    verbose("Resolved to these distributions:")
    for dist in sorted(set([f"  {dist.key} {dist.version}" for dist in dists])):
        verbose(dist)
    verbose(poetry_dry_run_proc.stdout)
    installs = int(match[1])
    updates = int(match[2])
    removals = int(match[3])
    if updates == 1 and sys.platform == 'win32' and "Updating pywin32" in poetry_dry_run_proc.stdout:
        # We have no idea why pywin32 thinks it needs to be updated
        # We could use some more investigation into this
        verbose(
            "Windows detected a single update to pywin32 which is known to be buggy. Continuing.")
    elif installs + updates + removals > 0:
        raise MissingRequirements(
            f"Detected one or more packages are out of date. "
            f"Try running:\n"
            f"    export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring\n"
            f"    {executable} -m poetry install --no-root --sync")
@@ -122,7 +122,7 @@ def build_pretty_printer_test(env, target, **kwargs):
        "gdb.execute('set confirm off')",
        "gdb.execute('source .gdbinit')",
        "try:",
        "    verify_requirements('etc/pip/components/core.req', executable=f'@python_executable@')",
        "    verify_requirements(executable=f'@python_executable@')",
        "except MissingRequirements as ex:",
        "    print(ex)",
        "    print('continuing testing anyways!')",