mirror of
https://github.com/mongodb/mongo.git
synced 2024-12-01 09:32:32 +01:00
SERVER-72348 Use the new mongo-tooling-metrics library
parent 6a44a4153f
commit 10a35d0d9b

SConstruct: 32 lines changed
@@ -22,9 +22,9 @@ from pkg_resources import parse_version

 import SCons
 import SCons.Script
-from buildscripts.metrics.metrics_datatypes import SConsToolingMetrics
-from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
-from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit
+from mongo_tooling_metrics.client import get_mongo_metrics_client
+from mongo_tooling_metrics.errors import ExternalHostException
+from mongo_tooling_metrics.lib.top_level_metrics import SConsToolingMetrics
 from site_scons.mongo import build_profiles

 # This must be first, even before EnsureSConsVersion, if
@@ -1590,15 +1590,23 @@ env.AddMethod(lambda env, name, **kwargs: add_option(name, **kwargs), 'AddOption

 # The placement of this is intentional. Here we setup an atexit method to store tooling metrics.
 # We should only register this function after env, env_vars and the parser have been properly initialized.
-register_metrics_collection_atexit(
-    SConsToolingMetrics.generate_metrics, {
-        "utc_starttime": datetime.utcnow(),
-        "env_vars": env_vars,
-        "env": env,
-        "parser": _parser,
-        "args": sys.argv,
-        "exit_hook": initialize_exit_hook(),
-    })
+try:
+    metrics_client = get_mongo_metrics_client()
+    metrics_client.register_metrics(
+        SConsToolingMetrics,
+        utc_starttime=datetime.utcnow(),
+        artifact_dir=env.Dir('$BUILD_DIR').get_abspath(),
+        env_vars=env_vars,
+        env=env,
+        parser=_parser,
+        args=sys.argv,
+    )
+except ExternalHostException as _:
+    pass
+except Exception as _:
+    print(
+        "This MongoDB Virtual Workstation could not connect to the internal cluster\nThis is a non-issue, but if this message persists feel free to reach out in #server-dev-platform"
+    )

 if get_option('build-metrics'):
     env['BUILD_METRICS_ARTIFACTS_DIR'] = '$BUILD_ROOT/$VARIANT_DIR'
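The same pattern replaces the old atexit registration in resmoke.py further down. As a quick orientation, here is a hedged sketch of the new registration flow, using only calls that appear in this diff (get_mongo_metrics_client, register_metrics, ExternalHostException); the save-at-exit behavior is assumed from the new tests below, not from library documentation.

# Hedged sketch: registration as it appears in this diff, not a verified API reference.
from datetime import datetime

from mongo_tooling_metrics.client import get_mongo_metrics_client
from mongo_tooling_metrics.errors import ExternalHostException
from mongo_tooling_metrics.lib.top_level_metrics import ResmokeToolingMetrics

try:
    metrics_client = get_mongo_metrics_client()
    # register_metrics() takes the top-level metrics class plus the kwargs its
    # generator needs; per the new tests below, the save itself is hooked atexit.
    metrics_client.register_metrics(ResmokeToolingMetrics,
                                    utc_starttime=datetime.utcnow())
except ExternalHostException as _:
    pass  # not an internal host, so metrics collection does not apply
except Exception as _:  # pylint: disable=broad-except
    print("Internal metrics registration failed; this is a non-issue.")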
buildscripts/metrics/metrics_datatypes.py (deleted)
@@ -1,297 +0,0 @@
from abc import abstractmethod
import configparser
from datetime import datetime
import multiprocessing
import os
import socket
import sys
from typing import Any, Dict, List, Optional
import distro
import git
from pydantic import BaseModel

from buildscripts.metrics.tooling_exit_hook import _ExitHook

# pylint: disable=bare-except

SCONS_ENV_FILE = "scons_env.env"
SCONS_SECTION_HEADER = "SCONS_ENV"


class BaseMetrics(BaseModel):
    """Base class for an metrics object."""

    @classmethod
    @abstractmethod
    def generate_metrics(cls, **kwargs):
        """Generate metrics."""
        raise NotImplementedError

    @abstractmethod
    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        raise NotImplementedError


class BuildInfo(BaseMetrics):
    """Class to store the Build environment, options & artifacts."""

    env: Optional[Dict[str, Any]]
    options: Optional[Dict[str, Any]]
    build_artifacts: Optional[List[str]]
    artifact_dir: Optional[str]

    @classmethod
    def generate_metrics(
        cls,
        utc_starttime: datetime,
        env_vars: "SCons.Variables.Variables",
        env: "SCons.Script.SConscript.SConsEnvironment",
        parser: "SCons.Script.SConsOptions.SConsOptionParser",
        args: List[str],
    ):  # pylint: disable=arguments-differ
        """Get SCons build info to the best of our ability."""
        artifact_dir = cls._get_scons_artifact_dir(env)
        return cls(
            env=cls._get_scons_env_vars_dict(env_vars, env),
            options=cls._get_scons_options_dict(parser, args),
            build_artifacts=cls._get_artifacts(utc_starttime, artifact_dir),
            artifact_dir=artifact_dir,
        )

    @staticmethod
    def _get_scons_env_vars_dict(
        env_vars: "SCons.Variables.Variables",
        env: "SCons.Script.SConscript.SConsEnvironment",
    ) -> Optional[Dict[str, Any]]:
        """Get the environment variables options that can be set by users."""

        artifact_dir = BuildInfo._get_scons_artifact_dir(env)
        artifact_dir = artifact_dir if artifact_dir else '.'
        scons_env_filepath = f'{artifact_dir}/{SCONS_ENV_FILE}'
        try:
            # Use SCons built-in method to save environment variables to a file
            env_vars.Save(scons_env_filepath, env)

            # Add a section header to the file so we can easily parse with ConfigParser
            with open(scons_env_filepath, 'r') as original:
                data = original.read()
            with open(scons_env_filepath, 'w') as modified:
                modified.write(f"[{SCONS_SECTION_HEADER}]\n" + data)

            # Parse file using config parser
            config = configparser.ConfigParser()
            config.read(scons_env_filepath)
            str_dict = dict(config[SCONS_SECTION_HEADER])
            return {key: eval(val) for key, val in str_dict.items()}  # pylint: disable=eval-used
        except:
            return None

    @staticmethod
    def _get_scons_options_dict(
        parser: "SCons.Script.SConsOptions.SConsOptionParser",
        args: List[str],
    ) -> Optional[Dict[str, Any]]:
        """Get the scons cli options set by users."""
        try:
            scons_options, _ = parser.parse_args(args)
            return vars(scons_options)
        except:
            return None

    @staticmethod
    def _get_scons_artifact_dir(env: "SCons.Script.SConscript.SConsEnvironment") -> Optional[str]:
        """Get the artifact dir for this build."""
        try:
            return env.Dir('$BUILD_DIR').get_abspath()
        except:
            return None

    @staticmethod
    def _get_artifacts(utc_starttime: datetime, artifact_dir: str) -> List[str]:
        """Search a directory recursively for all files created after the given timestamp."""
        try:
            start_timestamp = datetime.timestamp(utc_starttime)
            artifacts = []
            for root, _, files in os.walk(artifact_dir):
                for file in files:
                    filepath = os.path.join(root, file)
                    _, ext = os.path.splitext(filepath)
                    if ext in ['.a', '.so', ''] and os.path.getmtime(filepath) >= start_timestamp:
                        artifacts.append(filepath)
            return artifacts
        except:
            return None

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        return None in [self.artifact_dir, self.env, self.options, self.build_artifacts]


class HostInfo(BaseMetrics):
    """Class to store host information."""

    ip_address: Optional[str]
    host_os: str
    num_cores: int
    memory: Optional[float]

    @classmethod
    def generate_metrics(cls):  # pylint: disable=arguments-differ
        """Get the host info to the best of our ability."""
        try:
            ip_address = socket.gethostbyname(socket.gethostname())
        except:
            ip_address = None
        try:
            memory = cls._get_memory()
        except:
            memory = None
        return cls(
            ip_address=ip_address,
            host_os=distro.name(pretty=True),
            num_cores=multiprocessing.cpu_count(),
            memory=memory,
        )

    @staticmethod
    def _get_memory():
        """Get total memory of the host system."""
        return os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024.**3)

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        return None in [self.memory, self.ip_address]


class GitInfo(BaseMetrics):
    """Class to store git repo information."""

    filepath: str
    commit_hash: Optional[str]
    branch_name: Optional[str]
    repo_name: Optional[str]

    @classmethod
    def generate_metrics(cls, filepath: str):  # pylint: disable=arguments-differ
        """Get the git info for a repo to the best of our ability."""
        try:
            commit_hash = git.Repo(filepath).head.commit.hexsha
        except:
            commit_hash = None
        try:
            if git.Repo(filepath).head.is_detached:
                branch_name = commit_hash
            else:
                branch_name = git.Repo(filepath).active_branch.name
        except:
            branch_name = None
        try:
            repo_name = git.Repo(filepath).working_tree_dir.split("/")[-1]
        except:
            repo_name = None
        return cls(
            filepath=filepath,
            commit_hash=commit_hash,
            branch_name=branch_name,
            repo_name=repo_name,
        )

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        return None in [self.commit_hash, self.branch_name, self.repo_name]


MODULES_FILEPATH = 'src/mongo/db/modules'


def _get_modules_git_info():
    """Get git info for all modules."""
    module_git_info = []
    try:
        module_git_info = [
            GitInfo.generate_metrics(os.path.join(MODULES_FILEPATH, module))
            for module in os.listdir(MODULES_FILEPATH)
            if os.path.isdir(os.path.join(MODULES_FILEPATH, module))
        ]
    except:
        pass
    return module_git_info


class ResmokeToolingMetrics(BaseMetrics):
    """Class to store resmoke tooling metrics."""

    source: str
    utc_starttime: datetime
    utc_endtime: datetime
    host_info: HostInfo
    git_info: GitInfo
    exit_code: Optional[int]
    command: List[str]
    module_info: List[GitInfo]

    @classmethod
    def generate_metrics(
        cls,
        utc_starttime: datetime,
        exit_hook: _ExitHook,
    ):  # pylint: disable=arguments-differ
        """Get resmoke metrics to the best of our ability."""
        return cls(
            source='resmoke',
            utc_starttime=utc_starttime,
            utc_endtime=datetime.utcnow(),
            host_info=HostInfo.generate_metrics(),
            git_info=GitInfo.generate_metrics('.'),
            exit_code=exit_hook.exit_code if isinstance(exit_hook.exit_code, int) else None,
            command=sys.argv,
            module_info=_get_modules_git_info(),
        )

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        sub_metrics = self.module_info + [self.git_info] + [self.host_info]
        return self.exit_code is None or any(metrics.is_malformed() for metrics in sub_metrics)


class SConsToolingMetrics(BaseMetrics):
    """Class to store scons tooling metrics."""

    source: str
    utc_starttime: datetime
    utc_endtime: datetime
    host_info: HostInfo
    git_info: GitInfo
    exit_code: Optional[int]
    build_info: BuildInfo
    command: List[str]
    module_info: List[GitInfo]

    @classmethod
    def generate_metrics(
        cls,
        utc_starttime: datetime,
        env_vars: "SCons.Variables.Variables",
        env: "SCons.Script.SConscript.SConsEnvironment",
        parser: "SCons.Script.SConsOptions.SConsOptionParser",
        args: List[str],
        exit_hook: _ExitHook,
    ):  # pylint: disable=arguments-differ
        """Get scons metrics to the best of our ability."""
        return cls(
            source='scons',
            utc_starttime=utc_starttime,
            utc_endtime=datetime.utcnow(),
            host_info=HostInfo.generate_metrics(),
            git_info=GitInfo.generate_metrics('.'),
            build_info=BuildInfo.generate_metrics(utc_starttime, env_vars, env, parser, args),
            exit_code=exit_hook.exit_code if isinstance(exit_hook.exit_code, int) else None,
            command=sys.argv,
            module_info=_get_modules_git_info(),
        )

    def is_malformed(self) -> bool:
        """Confirm whether this instance has all expected fields."""
        sub_metrics = self.module_info + [self.git_info] + [self.host_info] + [self.build_info]
        return self.exit_code is None or any(metrics.is_malformed() for metrics in sub_metrics)
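For context on the module deleted above, a small usage sketch (as it stood before this commit; assumes a checkout of the mongo repo with the old buildscripts.metrics package on the path):

# Usage sketch for the deleted module: each generate_metrics() gathers data
# best-effort, and is_malformed() reports whether any expected field is missing.
from buildscripts.metrics.metrics_datatypes import GitInfo, HostInfo

host_info = HostInfo.generate_metrics()   # ip/os/cores/memory; None on failure
git_info = GitInfo.generate_metrics('.')  # commit/branch/repo of the cwd repo

print(host_info.dict())          # pydantic model -> plain dict, ready to insert
print(host_info.is_malformed())  # True if e.g. ip_address or memory is None
print(git_info.is_malformed())   # True if the git lookups failed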
buildscripts/metrics/tooling_exit_hook.py (deleted)
@@ -1,36 +0,0 @@
import sys

# pylint: disable=invalid-name
# pylint: disable=redefined-outer-name


# DO NOT INITIALIZE DIRECTLY -- This is intended to be a singleton.
class _ExitHook(object):
    """Plumb all sys.exit through this object so that we can access the exit code in atexit."""

    def __init__(self):
        self.exit_code = 0
        self._orig_exit = sys.exit
        sys.exit = self.exit

    def __del__(self):
        sys.exit = self._orig_exit

    def exit(self, code=0):
        self.exit_code = code
        self._orig_exit(code)


SINGLETON_TOOLING_METRICS_EXIT_HOOK = None


# Always use this method when initializing _ExitHook -- This guarantees you are using the singleton
# initialize the exit hook as early as possible to ensure we capture the error.
def initialize_exit_hook() -> None:
    """Initialize the exit hook."""
    try:
        if not SINGLETON_TOOLING_METRICS_EXIT_HOOK:
            SINGLETON_TOOLING_METRICS_EXIT_HOOK = _ExitHook()
    except UnboundLocalError as _:
        SINGLETON_TOOLING_METRICS_EXIT_HOOK = _ExitHook()
    return SINGLETON_TOOLING_METRICS_EXIT_HOOK
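A brief sketch of how the deleted exit hook was consumed (mirroring its use in the old SConstruct and resmoke.py code in this diff): initialize it early so sys.exit is wrapped, then read exit_code when metrics are generated at interpreter exit.

# Sketch using the deleted module's own API.
import atexit

from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook

exit_hook = initialize_exit_hook()  # singleton _ExitHook; wraps sys.exit


def _report_exit_code():
    # Runs at interpreter exit: exit_code is whatever was passed to sys.exit,
    # or 0 if the program never called sys.exit explicitly.
    print(f"exit code captured by hook: {exit_hook.exit_code}")


atexit.register(_report_exit_code)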
buildscripts/metrics/tooling_metrics_utils.py (deleted)
@@ -1,76 +0,0 @@
import atexit
import logging
import os
from typing import Any, Callable, Dict
import pymongo

logger = logging.getLogger('tooling_metrics')

INTERNAL_TOOLING_METRICS_HOSTNAME = "mongodb+srv://dev-metrics-pl-0.kewhj.mongodb.net"
INTERNAL_TOOLING_METRICS_USERNAME = "internal_tooling_user"
INTERNAL_TOOLING_METRICS_PASSWORD = "internal_tooling_user"


def _get_internal_tooling_metrics_client() -> pymongo.MongoClient:
    """Retrieve client for internal MongoDB tooling metrics cluster."""
    return pymongo.MongoClient(
        host=INTERNAL_TOOLING_METRICS_HOSTNAME,
        username=INTERNAL_TOOLING_METRICS_USERNAME,
        password=INTERNAL_TOOLING_METRICS_PASSWORD,
        socketTimeoutMS=1000,
        serverSelectionTimeoutMS=1000,
        connectTimeoutMS=1000,
        waitQueueTimeoutMS=1000,
        retryWrites=False,
    )


MONGOD_INTENRAL_DISTRO_FILEPATH = '/etc/mongodb-distro-name'


def _is_virtual_workstation() -> bool:
    """Detect whether this is a MongoDB internal virtual workstation."""
    try:
        with open(MONGOD_INTENRAL_DISTRO_FILEPATH, 'r') as file:
            return file.read().strip() == 'ubuntu1804-workstation'
    except Exception as _:  # pylint: disable=broad-except
        return False


TOOLING_METRICS_OPT_OUT = "TOOLING_METRICS_OPT_OUT"


def _has_metrics_opt_out() -> bool:
    """Check whether the opt out environment variable is set."""
    return os.environ.get(TOOLING_METRICS_OPT_OUT, None) == '1'


def _should_collect_metrics() -> bool:
    """Determine whether to collect tooling metrics."""
    return _is_virtual_workstation() and not _has_metrics_opt_out()


# DO NOT USE DIRECTLY -- This is only to be used when metrics collection is registered atexit
def _save_metrics(
    generate_metrics_function: Callable,
    generate_metrics_args: Dict[str, Any],
) -> None:
    """Save metrics to the atlas cluster."""
    try:
        client = _get_internal_tooling_metrics_client()
        metrics = generate_metrics_function(**generate_metrics_args)
        client.metrics.tooling_metrics.insert_one(metrics.dict())
    except Exception as exc:  # pylint: disable=broad-except
        logger.warning(
            "%s\n\nInternal Metrics Collection Failed -- this is a non-issue.\nIf this message persists, feel free to reach out to #server-dev-platform",
            exc)


# This is the only util that should be used externally
def register_metrics_collection_atexit(
    generate_metrics_function: Callable,
    generate_metrics_args: Dict[str, Any],
) -> None:
    """Register metrics collection on atexit."""
    if _should_collect_metrics():
        atexit.register(_save_metrics, generate_metrics_function, generate_metrics_args)
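Putting the three deleted modules together, the old end-to-end flow removed by this commit looked like the following; it mirrors the code deleted from resmoke.py just below and only ever ran on opted-in internal virtual workstations.

# Old flow: hand register_metrics_collection_atexit a metrics generator and its
# kwargs; if _should_collect_metrics() is true, _save_metrics runs at exit and
# inserts the generated document into the internal Atlas cluster.
from datetime import datetime

from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics
from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit

register_metrics_collection_atexit(
    ResmokeToolingMetrics.generate_metrics,
    {
        "utc_starttime": datetime.utcnow(),
        "exit_hook": initialize_exit_hook(),
    },
)
# Any failure inside _save_metrics is swallowed with a logger warning, so the
# calling tool is never failed by metrics collection.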
buildscripts/resmoke.py
@@ -4,9 +4,9 @@ from datetime import datetime
 import time
 import os
 import psutil
-from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics
-from buildscripts.metrics.tooling_exit_hook import initialize_exit_hook
-from buildscripts.metrics.tooling_metrics_utils import register_metrics_collection_atexit
+from mongo_tooling_metrics.client import get_mongo_metrics_client
+from mongo_tooling_metrics.errors import ExternalHostException
+from mongo_tooling_metrics.lib.top_level_metrics import ResmokeToolingMetrics
 from buildscripts.resmokelib import parser

@@ -27,8 +27,15 @@ def main(argv):
         "For example: resmoke.py run -h\n"
         "Note: bisect and setup-multiversion subcommands have been moved to db-contrib-tool (https://github.com/10gen/db-contrib-tool#readme).\n"
     )
-    register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics, {
-        "utc_starttime": datetime.utcfromtimestamp(__start_time),
-        "exit_hook": initialize_exit_hook()
-    })
+    try:
+        metrics_client = get_mongo_metrics_client()
+        metrics_client.register_metrics(ResmokeToolingMetrics,
+                                        utc_starttime=datetime.utcfromtimestamp(__start_time))
+    except ExternalHostException as _:
+        pass
+    except Exception as _:  # pylint: disable=broad-except
+        print(
+            "This MongoDB Virtual Workstation could not connect to the internal cluster\nThis is a non-issue, but if this message persists feel free to reach out in #server-dev-platform"
+        )

     subcommand.execute()
@@ -1,114 +0,0 @@
"""Unit tests for metrics_datatypes.py."""
from datetime import datetime
import os
import sys
import unittest
from unittest.mock import patch

from mock import MagicMock

import buildscripts.metrics.metrics_datatypes as under_test

# pylint: disable=unused-argument

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()

MOCK_EXIT_HOOK = MagicMock(exit_code=0)


@patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_artifact_dir",
       return_value='/test')
class TestBuildInfo(unittest.TestCase):
    @patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_env_vars_dict",
           return_value={'env': 'env'})
    @patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_options_dict",
           return_value={'opt': 'opt'})
    def test_build_info_valid(self, mock_env, mock_options, mock_artifact_dir):
        build_info = under_test.BuildInfo.generate_metrics(datetime.utcnow(), MagicMock(),
                                                           MagicMock(), MagicMock(), MagicMock())
        assert not build_info.is_malformed()

    def test_build_info_malformed(self, mock_artifact_dir):
        build_info = under_test.BuildInfo.generate_metrics(datetime.utcnow(), MagicMock(),
                                                           MagicMock(), MagicMock(), MagicMock())
        assert build_info.is_malformed()


class TestHostInfo(unittest.TestCase):
    @patch("buildscripts.metrics.metrics_datatypes.HostInfo._get_memory", side_effect=Exception())
    def test_host_info_with_exc(self, mock_get_memory):
        host_info = under_test.HostInfo.generate_metrics()
        assert host_info.is_malformed()

    # Mock this so that it passes when running the 'buildscripts_test' suite on Windows
    @patch("buildscripts.metrics.metrics_datatypes.HostInfo._get_memory", return_value=30)
    def test_host_info_no_exc(self, mock_get_memory):
        host_info = under_test.HostInfo.generate_metrics()
        assert not host_info.is_malformed()


class TestGitInfo(unittest.TestCase):
    @patch("git.Repo", side_effect=Exception())
    def test_git_info_with_exc(self, mock_repo):
        git_info = under_test.GitInfo.generate_metrics('.')
        assert git_info.is_malformed()

    def test_git_info_no_exc(self):
        git_info = under_test.GitInfo.generate_metrics('.')
        assert not git_info.is_malformed()

    @patch("git.refs.symbolic.SymbolicReference.is_detached", True)
    def test_git_info_detached_head(self):
        git_info = under_test.GitInfo.generate_metrics('.')
        assert not git_info.is_malformed()


class TestResmokeToolingMetrics(unittest.TestCase):
    @patch("socket.gethostname", side_effect=Exception())
    def test_resmoke_tooling_metrics_valid(self, mock_gethostname):
        tooling_metrics = under_test.ResmokeToolingMetrics.generate_metrics(
            datetime.utcnow(),
            MOCK_EXIT_HOOK,
        )
        assert tooling_metrics.is_malformed()

    def test_resmoke_tooling_metrics_malformed(self):
        tooling_metrics = under_test.ResmokeToolingMetrics.generate_metrics(
            datetime.utcnow(),
            MOCK_EXIT_HOOK,
        )
        assert not tooling_metrics.is_malformed()


class TestSConsToolingMetrics(unittest.TestCase):
    @patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_artifact_dir",
           return_value='/test')
    @patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_env_vars_dict",
           return_value={'env': 'env'})
    @patch("buildscripts.metrics.metrics_datatypes.BuildInfo._get_scons_options_dict",
           return_value={'opt': 'opt'})
    def test_scons_tooling_metrics_valid(self, mock_options, mock_env, mock_artifact_dir):
        parser = MagicMock()
        parser.parse_args = MagicMock(return_value={"opt1": "val1"})
        tooling_metrics = under_test.SConsToolingMetrics.generate_metrics(
            datetime.utcnow(),
            {'env': 'env'},
            {'opts': 'opts'},
            parser,
            ['test1', 'test2'],
            MOCK_EXIT_HOOK,
        )
        assert not tooling_metrics.is_malformed()

    def test_scons_tooling_metrics_malformed(self):
        tooling_metrics = under_test.SConsToolingMetrics.generate_metrics(
            datetime.utcnow(),
            {'env': 'env'},
            {'opts': 'opts'},
            None,
            [],
            MOCK_EXIT_HOOK,
        )
        assert tooling_metrics.is_malformed()
@@ -1,47 +0,0 @@
from datetime import datetime
import os
import sys
import unittest
from unittest.mock import patch

import buildscripts.resmoke as under_test

TEST_INTERNAL_TOOLING_METRICS_HOSTNAME = 'mongodb://testing:27017'
CURRENT_DATE_TIME = datetime(2022, 10, 4)

# pylint: disable=unused-argument

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()


@patch("buildscripts.resmokelib.logging.flush._FLUSH_THREAD", None)
@patch("atexit.register")
class TestResmokeAtExitMetricsCollection(unittest.TestCase):
    @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
    def test_resmoke_at_exit_metrics_collection(self, mock_should_collect_metrics,
                                                mock_atexit_register):
        under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" in atexit_functions

    @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
    def test_no_resmoke_at_exit_metrics_collection(self, mock_should_collect_metrics,
                                                   mock_atexit_register):
        under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" not in atexit_functions

    @patch("sys.argv", ['buildscripts/resmoke.py', 'run', '--suite', 'buildscripts_test'])
    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
    @patch("buildscripts.resmokelib.testing.executor.TestSuiteExecutor._run_tests",
           side_effect=Exception())
    def test_resmoke_at_exit_metrics_collection_exc(
            self, mock_exc_method, mock_should_collect_metrics, mock_atexit_register):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" in atexit_functions
@@ -1,44 +0,0 @@
import os
import sys
import unittest
from unittest.mock import patch
import buildscripts.scons as under_test

# pylint: disable=unused-argument
# pylint: disable=protected-access

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()


@patch("sys.argv", [
    'buildscripts/scons.py', "CC=/opt/mongodbtoolchain/v4/bin/gcc",
    "CXX=/opt/mongodbtoolchain/v4/bin/g++", "NINJA_PREFIX=test_success", "--ninja"
])
@patch("atexit.register")
class TestSconsAtExitMetricsCollection(unittest.TestCase):
    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
    def test_scons_at_exit_metrics_collection(self, mock_should_collect_metrics,
                                              mock_atexit_register):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" in atexit_functions

    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
    def test_no_scons_at_exit_metrics_collection(self, mock_should_collect_metrics,
                                                 mock_atexit_register):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" not in atexit_functions

    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
    @patch("buildscripts.moduleconfig.get_module_sconscripts", side_effect=Exception())
    def test_scons_at_exit_metrics_collection_exc(
            self, mock_exc_method, mock_should_collect_metrics, mock_atexit_register):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_save_metrics" in atexit_functions
@@ -1,94 +0,0 @@
"""Unit tests for tooling_metrics.py."""
from datetime import datetime
import os
import sys
import unittest
from unittest.mock import mock_open, patch
from mock import MagicMock
import mongomock
import pymongo
from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics, SConsToolingMetrics
import buildscripts.metrics.tooling_metrics_utils as under_test

# pylint: disable=unused-argument
# pylint: disable=protected-access

TEST_INTERNAL_TOOLING_METRICS_HOSTNAME = 'mongodb://testing:27017'
RESMOKE_METRICS_ARGS = {
    "utc_starttime": datetime(2022, 10, 4),
    "exit_hook": MagicMock(exit_code=0),
}

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()


@patch("atexit.register")
class TestRegisterMetricsCollectionAtExit(unittest.TestCase):
    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=True)
    def test_register_metrics_collection(self, mock_should_collect_metrics, mock_atexit):
        under_test.register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics,
                                                      RESMOKE_METRICS_ARGS)
        atexit_functions = [call[0][0].__name__ for call in mock_atexit.call_args_list]
        assert "_save_metrics" in atexit_functions

    @patch("buildscripts.metrics.tooling_metrics_utils._should_collect_metrics", return_value=False)
    def test_no_register_metrics_collection(self, mock_should_collect_metrics, mock_atexit):
        under_test.register_metrics_collection_atexit(ResmokeToolingMetrics.generate_metrics,
                                                      RESMOKE_METRICS_ARGS)
        atexit_functions = [call[0][0].__name__ for call in mock_atexit.call_args_list]
        assert "_save_metrics" not in atexit_functions


@patch("buildscripts.metrics.tooling_metrics_utils.INTERNAL_TOOLING_METRICS_HOSTNAME",
       TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
class TestSaveToolingMetrics(unittest.TestCase):
    @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
    def test_save_resmoke_metrics(self):
        under_test._save_metrics(ResmokeToolingMetrics.generate_metrics, RESMOKE_METRICS_ARGS)
        client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
        assert client.metrics.tooling_metrics.find_one()

    @mongomock.patch(servers=((TEST_INTERNAL_TOOLING_METRICS_HOSTNAME), ))
    @patch("buildscripts.metrics.tooling_metrics_utils._get_internal_tooling_metrics_client",
           side_effect=pymongo.errors.ServerSelectionTimeoutError(message="Error Information"))
    def test_save_metrics_with_exc(self, mock_save_metrics):
        with self.assertLogs('tooling_metrics') as cm:
            under_test._save_metrics(ResmokeToolingMetrics.generate_metrics, RESMOKE_METRICS_ARGS)
        assert "Error Information" in cm.output[0]
        assert "Internal Metrics Collection Failed" in cm.output[0]
        client = pymongo.MongoClient(host=TEST_INTERNAL_TOOLING_METRICS_HOSTNAME)
        assert not client.metrics.tooling_metrics.find_one()


class TestIsVirtualWorkstation(unittest.TestCase):
    @patch("builtins.open", mock_open(read_data="ubuntu1804-workstation"))
    def test_is_virtual_workstation(self):
        assert under_test._is_virtual_workstation() is True

    @patch("builtins.open", mock_open(read_data="test"))
    def test_is_not_virtual_workstation(self):
        assert under_test._is_virtual_workstation() is False


class TestHasMetricsOptOut(unittest.TestCase):
    @patch("os.environ.get", return_value='1')
    def test_opt_out(self, mock_environ_get):
        assert under_test._has_metrics_opt_out()

    @patch("os.environ.get", return_value=None)
    def test_no_opt_out(self, mock_environ_get):
        assert not under_test._has_metrics_opt_out()


class TestShouldCollectMetrics(unittest.TestCase):
    @patch("buildscripts.metrics.tooling_metrics_utils._is_virtual_workstation", return_value=True)
    @patch("buildscripts.metrics.tooling_metrics_utils._has_metrics_opt_out", return_value=False)
    def test_should_collect_metrics(self, mock_opt_out, mock_is_virtual_env):
        assert under_test._should_collect_metrics()

    @patch("buildscripts.metrics.tooling_metrics_utils._is_virtual_workstation", return_value=True)
    @patch("buildscripts.metrics.tooling_metrics_utils._has_metrics_opt_out", return_value=True)
    def test_no_collect_metrics_opt_out(self, mock_opt_out, mock_is_virtual_env):
        assert not under_test._should_collect_metrics()
@@ -0,0 +1,73 @@
from datetime import datetime
import os
import sys
import unittest
from unittest.mock import patch
from mock import MagicMock
from mongo_tooling_metrics import client
from mongo_tooling_metrics.base_metrics import TopLevelMetrics

import buildscripts.resmoke as under_test

CURRENT_DATE_TIME = datetime(2022, 10, 4)

# pylint: disable=unused-argument

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()


@patch("buildscripts.resmokelib.logging.flush._FLUSH_THREAD", None)
@patch("atexit.register")
class TestResmokeAtExitMetricsCollection(unittest.TestCase):
    @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    def test_resmoke_at_exit_metrics_collection(self, mock_atexit_register):
        under_test.entrypoint()

        atexit_functions = [
            call for call in mock_atexit_register.call_args_list
            if call[0][0].__name__ == '_verbosity_enforced_save_metrics'
        ]
        generate_metrics = atexit_functions[0][0][1].generate_metrics
        kwargs = atexit_functions[0][1]
        metrics = generate_metrics(**kwargs)

        assert not metrics.is_malformed()

    @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=False))
    def test_no_resmoke_at_exit_metrics_collection(self, mock_atexit_register):
        under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_verbosity_enforced_save_metrics" not in atexit_functions

    @patch("sys.argv", ['buildscripts/resmoke.py', 'list-suites'])
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=False))
    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    def test_resmoke_no_metric_collection_non_vw(self, mock_atexit_register):
        under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in mock_atexit_register.call_args_list]
        assert "_verbosity_enforced_save_metrics" not in atexit_functions

    @patch("sys.argv", ['buildscripts/resmoke.py', 'run', '--suite', 'buildscripts_test'])
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    @patch("buildscripts.resmokelib.testing.executor.TestSuiteExecutor._run_tests",
           side_effect=Exception())
    def test_resmoke_at_exit_metrics_collection_exc(self, mock_exc_method, mock_atexit_register):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()

        atexit_functions = [
            call for call in mock_atexit_register.call_args_list
            if call[0][0].__name__ == '_verbosity_enforced_save_metrics'
        ]
        generate_metrics = atexit_functions[0][0][1].generate_metrics
        kwargs = atexit_functions[0][1]
        metrics = generate_metrics(**kwargs)

        assert not metrics.is_malformed()
@@ -0,0 +1,96 @@
import atexit
import os
import sys
import unittest
from unittest.mock import patch
from mock import MagicMock
from mongo_tooling_metrics import client
from mongo_tooling_metrics.lib.utils import _is_virtual_workstation
from mongo_tooling_metrics.base_metrics import TopLevelMetrics
import buildscripts.scons as under_test

# Metrics collection is not supported for Windows
if os.name == "nt":
    sys.exit()


class InvalidSconsConfiguration(Exception):
    """Exception raised if the scons invocation itself fails."""
    pass


@patch("sys.argv", [
    'buildscripts/scons.py', "CC=/opt/mongodbtoolchain/v4/bin/gcc",
    "CXX=/opt/mongodbtoolchain/v4/bin/g++", "NINJA_PREFIX=test_success", "--ninja"
])
class TestSconsAtExitMetricsCollection(unittest.TestCase):
    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch.object(atexit, "register", MagicMock())
    def at_exit_metrics_collection(self):
        with self.assertRaises(SystemExit) as exc_info:
            under_test.entrypoint()

        if exc_info.exception.code != 0:
            raise InvalidSconsConfiguration("This SCons invocation is not supported on this host.")

        atexit_functions = [
            call for call in atexit.register.call_args_list
            if call[0][0].__name__ == '_verbosity_enforced_save_metrics'
        ]
        generate_metrics = atexit_functions[0][0][1].generate_metrics
        kwargs = atexit_functions[0][1]
        metrics = generate_metrics(**kwargs)

        assert not metrics.is_malformed()

    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=False))
    @patch.object(atexit, "register", MagicMock())
    def no_at_exit_metrics_collection(self):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in atexit.register.call_args_list]
        assert "_verbosity_enforced_save_metrics" not in atexit_functions

    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=False))
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch.object(atexit, "register", MagicMock())
    def no_metrics_collection_non_vw(self):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()
        atexit_functions = [call[0][0].__name__ for call in atexit.register.call_args_list]
        assert "_verbosity_enforced_save_metrics" not in atexit_functions

    @patch.object(TopLevelMetrics, 'should_collect_metrics', MagicMock(return_value=True))
    @patch.object(client, 'should_collect_internal_metrics', MagicMock(return_value=True))
    @patch("buildscripts.moduleconfig.get_module_sconscripts", MagicMock(side_effect=Exception()))
    @patch.object(atexit, "register", MagicMock())
    def at_exit_metrics_collection_exc(self):
        with self.assertRaises(SystemExit) as _:
            under_test.entrypoint()

        atexit_functions = [
            call for call in atexit.register.call_args_list
            if call[0][0].__name__ == '_verbosity_enforced_save_metrics'
        ]
        generate_metrics = atexit_functions[0][0][1].generate_metrics
        kwargs = atexit_functions[0][1]
        metrics = generate_metrics(**kwargs)

        assert not metrics.is_malformed()

    def test_scons_metrics_collection_at_exit(self):
        """Run all tests in this TestCase sequentially from this method."""

        try:
            # If this test fails and this is NOT a Virtual Workstation, we bail because metrics
            # collection is only supported on virtual workstations
            self.at_exit_metrics_collection()
        except InvalidSconsConfiguration:
            if not _is_virtual_workstation():
                return
            raise InvalidSconsConfiguration
        self.no_at_exit_metrics_collection()
        self.no_metrics_collection_non_vw()
        self.at_exit_metrics_collection_exc()
@@ -1,4 +1 @@
-distro == 1.5.0
-GitPython ~= 3.1.7
-pydantic ~= 1.8.2
-dnspython == 2.1.0
+mongo-tooling-metrics == 1.0.4
@@ -4,3 +4,4 @@
 -r components/testing.req

 -r components/aws.req
+-r components/tooling_metrics.req
@@ -8,13 +8,13 @@ from pydantic import ValidationError
 if __name__ == "__main__" and __package__ is None:
     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-from buildscripts.metrics.metrics_datatypes import ResmokeToolingMetrics, SConsToolingMetrics
-from buildscripts.metrics.tooling_metrics_utils import _get_internal_tooling_metrics_client
+from mongo_tooling_metrics.client import get_mongo_metrics_client
+from mongo_tooling_metrics.lib.top_level_metrics import ResmokeToolingMetrics, SConsToolingMetrics
 from evergreen.api import RetryingEvergreenApi

 # Check cluster connectivity
 try:
-    client = _get_internal_tooling_metrics_client()
+    client = get_mongo_metrics_client().mongo_client
     print(client.server_info())
 except Exception as exc:
     print("Could not connect to Atlas cluster")