mirror of https://github.com/django/django.git
synced 2024-12-01 15:42:04 +01:00
commit 9baf692a58
Thanks Tim Graham for polishing the patch, updating the tests, and writing documentation. Thanks Carl Meyer for shepherding the DEP.
493 lines | 18 KiB | Python | Executable File
#!/usr/bin/env python
|
|
import argparse
|
|
import atexit
|
|
import copy
|
|
import os
|
|
import shutil
|
|
import subprocess
|
|
import sys
|
|
import tempfile
|
|
import warnings
|
|
|
|
import django
|
|
from django.apps import apps
|
|
from django.conf import settings
|
|
from django.db import connection, connections
|
|
from django.test import TestCase, TransactionTestCase
|
|
from django.test.runner import default_test_processes
|
|
from django.test.selenium import SeleniumTestCaseBase
|
|
from django.test.utils import get_runner
|
|
from django.utils import six
|
|
from django.utils._os import upath
|
|
from django.utils.deprecation import RemovedInDjango20Warning
|
|
from django.utils.log import DEFAULT_LOGGING
|
|
|
|
# Make deprecation warnings errors to ensure no usage of deprecated features.
warnings.simplefilter("error", RemovedInDjango20Warning)
# Make runtime warning errors to ensure no usage of error prone patterns.
warnings.simplefilter("error", RuntimeWarning)
# Ignore known warnings in test dependencies.
warnings.filterwarnings("ignore", "'U' mode is deprecated", DeprecationWarning, module='docutils.io')

# Directory this script lives in; test apps are discovered relative to it.
RUNTESTS_DIR = os.path.abspath(os.path.dirname(upath(__file__)))

# Shared project-level template directory used by the test settings.
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates')

# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix='django_')
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that children processes inherit it.
tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR

# Removing the temporary TMPDIR. Ensure we pass in unicode so that it will
# successfully remove temp trees containing non-ASCII filenames on Windows.
# (We're assuming the temp dir name itself only contains ASCII characters.)
atexit.register(shutil.rmtree, six.text_type(TMPDIR))
|
|
|
|
|
|
# Subdirectories of RUNTESTS_DIR that are not test applications and must be
# skipped during discovery.
SUBDIRS_TO_SKIP = [
    'data',
    'import_error_package',
    'test_discovery_sample',
    'test_discovery_sample2',
]

# Apps installed for every test run, before any per-label test apps.
ALWAYS_INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'django.contrib.sites',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.admin.apps.SimpleAdminConfig',
    'django.contrib.staticfiles',
]

# Middleware active for every test run.
ALWAYS_MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
]

# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and isn't in an application in INSTALLED_APPS."
CONTRIB_TESTS_TO_APPS = {
    'flatpages_tests': 'django.contrib.flatpages',
    'redirects_tests': 'django.contrib.redirects',
}
|
|
|
|
|
|
def get_test_modules():
    """
    Scan the test directories and return a list of (modpath, module_name)
    pairs for every discovered test application package.

    modpath is None for apps directly under RUNTESTS_DIR, and 'gis_tests'
    for the nested GIS test apps.
    """
    modules = []
    discovery_paths = [
        (None, RUNTESTS_DIR),
        # GIS tests are in nested apps
        ('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests')),
    ]

    for modpath, dirpath in discovery_paths:
        for f in os.listdir(dirpath):
            # Skip hidden entries / names with dots, known non-app dirs,
            # plain files, and directories that aren't Python packages.
            # Note: the isfile() check must be made relative to dirpath;
            # testing the bare name would resolve against the current
            # working directory instead of the directory being scanned.
            if ('.' in f or
                    f in SUBDIRS_TO_SKIP or
                    os.path.isfile(os.path.join(dirpath, f)) or
                    not os.path.exists(os.path.join(dirpath, f, '__init__.py'))):
                continue
            modules.append((modpath, f))
    return modules
|
|
|
|
|
|
def get_installed():
    """Return the dotted names of all currently installed applications."""
    return [app_conf.name for app_conf in apps.get_app_configs()]
|
|
|
|
|
|
def setup(verbosity, test_labels, parallel):
    """
    Configure Django settings for a test run and install the test apps.

    Saves the settings that are about to be overridden into a dict and
    returns it so teardown() can restore them afterwards.
    """
    if verbosity >= 1:
        msg = "Testing against Django installed in '%s'" % os.path.dirname(django.__file__)
        max_parallel = default_test_processes() if parallel == 0 else parallel
        if max_parallel > 1:
            msg += " with up to %d processes" % max_parallel
        print(msg)

    # Force declaring available_apps in TransactionTestCase for faster tests.
    def no_available_apps(self):
        raise Exception("Please define available_apps in TransactionTestCase "
                        "and its subclasses.")
    TransactionTestCase.available_apps = property(no_available_apps)
    TestCase.available_apps = None

    # Snapshot of the settings we are about to override; returned to the
    # caller so teardown() can put them back.
    state = {
        'INSTALLED_APPS': settings.INSTALLED_APPS,
        'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
        'TEMPLATES': settings.TEMPLATES,
        'LANGUAGE_CODE': settings.LANGUAGE_CODE,
        'STATIC_URL': settings.STATIC_URL,
        'STATIC_ROOT': settings.STATIC_ROOT,
        'MIDDLEWARE': settings.MIDDLEWARE,
    }

    # Redirect some settings for the duration of these tests.
    settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
    settings.ROOT_URLCONF = 'urls'
    settings.STATIC_URL = '/static/'
    settings.STATIC_ROOT = os.path.join(TMPDIR, 'static')
    settings.TEMPLATES = [{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }]
    settings.LANGUAGE_CODE = 'en'
    settings.SITE_ID = 1
    settings.MIDDLEWARE = ALWAYS_MIDDLEWARE
    settings.MIGRATION_MODULES = {
        # This lets us skip creating migrations for the test models as many of
        # them depend on one of the following contrib applications.
        'auth': None,
        'contenttypes': None,
        'sessions': None,
    }
    log_config = copy.deepcopy(DEFAULT_LOGGING)
    # Filter out non-error logging so we don't have to capture it in lots of
    # tests.
    log_config['loggers']['django']['level'] = 'ERROR'
    settings.LOGGING = log_config

    warnings.filterwarnings(
        'ignore',
        'The GeoManager class is deprecated.',
        RemovedInDjango20Warning
    )

    # Load all the ALWAYS_INSTALLED_APPS.
    django.setup()

    # Load all the test model apps.
    test_modules = get_test_modules()

    # Reduce given test labels to just the app module path
    test_labels_set = set()
    for label in test_labels:
        bits = label.split('.')[:1]
        test_labels_set.add('.'.join(bits))

    installed_app_names = set(get_installed())
    for modpath, module_name in test_modules:
        if modpath:
            module_label = '.'.join([modpath, module_name])
        else:
            module_label = module_name
        # if the module (or an ancestor) was named on the command line, or
        # no modules were named (i.e., run all), import
        # this module and add it to INSTALLED_APPS.
        if not test_labels:
            module_found_in_labels = True
        else:
            module_found_in_labels = any(
                # exact match or ancestor match
                module_label == label or module_label.startswith(label + '.')
                for label in test_labels_set)

        # Some contrib test apps need their contrib application installed too.
        if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
            settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])

        if module_found_in_labels and module_label not in installed_app_names:
            if verbosity >= 2:
                print("Importing application %s" % module_name)
            settings.INSTALLED_APPS.append(module_label)

    # Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
    # @override_settings(INSTALLED_APPS=...) on all test cases.
    gis = 'django.contrib.gis'
    if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS:
        if verbosity >= 2:
            print("Importing application %s" % gis)
        settings.INSTALLED_APPS.append(gis)

    apps.set_installed_apps(settings.INSTALLED_APPS)

    return state
|
|
|
|
|
|
def teardown(state):
    """Restore the settings that setup() saved and overrode."""
    for name, saved_value in state.items():
        setattr(settings, name, saved_value)
|
|
|
|
|
|
def actual_test_processes(parallel):
    """
    Return the number of test processes to use.

    An explicit --parallel value wins; otherwise auto-detect, falling back
    to a single process when any database can't be cloned.
    """
    if parallel != 0:
        return parallel
    # This doesn't work before django.setup() on some databases.
    cloneable = all(conn.features.can_clone_databases for conn in connections.all())
    return default_test_processes() if cloneable else 1
|
|
|
|
|
|
class ActionSelenium(argparse.Action):
    """
    Validate the comma-separated list of requested browsers.
    """
    def __call__(self, parser, namespace, values, option_string=None):
        requested = values.split(',')
        for name in requested:
            try:
                SeleniumTestCaseBase.import_webdriver(name)
            except ImportError:
                message = "Selenium browser specification '%s' is not valid." % name
                raise argparse.ArgumentError(self, message)
        setattr(namespace, self.dest, requested)
|
|
|
|
|
|
def django_tests(verbosity, interactive, failfast, keepdb, reverse,
                 test_labels, debug_sql, parallel, tags, exclude_tags):
    """
    Set up the test environment, run the requested test labels (or all
    installed apps when none are given), tear down, and return the number
    of failures.
    """
    state = setup(verbosity, test_labels, parallel)

    # Run the test suite, including the extra validation tests.
    if not hasattr(settings, 'TEST_RUNNER'):
        settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
    runner_class = get_runner(settings)

    runner = runner_class(
        verbosity=verbosity,
        interactive=interactive,
        failfast=failfast,
        keepdb=keepdb,
        reverse=reverse,
        debug_sql=debug_sql,
        parallel=actual_test_processes(parallel),
        tags=tags,
        exclude_tags=exclude_tags,
    )
    failures = runner.run_tests(test_labels or get_installed(), extra_tests=[])
    teardown(state)
    return failures
|
|
|
|
|
|
def get_subprocess_args(options):
    """
    Translate the parsed options back into an argv list for re-running this
    script in a child process (used by --bisect and --pair).
    """
    subprocess_args = [
        sys.executable, upath(__file__), '--settings=%s' % options.settings
    ]
    if options.failfast:
        subprocess_args.append('--failfast')
    if options.verbosity:
        subprocess_args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        subprocess_args.append('--noinput')
    # --tag/--exclude-tag use action='append', so options.tags and
    # options.exclude_tags are lists (or None). Emit one flag per tag;
    # formatting the whole list produced "--tag=['x']". Also use the
    # registered spelling --exclude-tag: the underscore variant isn't a
    # valid option string for the child parser.
    for tag in options.tags or []:
        subprocess_args.append('--tag=%s' % tag)
    for tag in options.exclude_tags or []:
        subprocess_args.append('--exclude-tag=%s' % tag)
    return subprocess_args
|
|
|
|
|
|
def bisect_tests(bisection_label, options, test_labels, parallel):
    """
    Binary-search the test suite for a test that makes bisection_label fail.

    Repeatedly splits the label list in half, runs each half together with
    bisection_label in a subprocess, and keeps whichever half reproduces the
    failure, until a single label (or an ambiguous result) remains.
    """
    state = setup(options.verbosity, test_labels, parallel)

    test_labels = test_labels or get_installed()

    print('***** Bisecting test suite: %s' % ' '.join(test_labels))

    # Make sure the bisection point isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [bisection_label, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except ValueError:
            pass

    subprocess_args = get_subprocess_args(options)

    iteration = 1
    while len(test_labels) > 1:
        midpoint = len(test_labels) // 2
        # Each half always includes the bisection target itself.
        test_labels_a = test_labels[:midpoint] + [bisection_label]
        test_labels_b = test_labels[midpoint:] + [bisection_label]
        print('***** Pass %da: Running the first half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_a))
        failures_a = subprocess.call(subprocess_args + test_labels_a)

        print('***** Pass %db: Running the second half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_b))
        print('')
        failures_b = subprocess.call(subprocess_args + test_labels_b)

        if failures_a and not failures_b:
            print("***** Problem found in first half. Bisecting again...")
            iteration = iteration + 1
            # Drop the trailing bisection_label before recursing on the half.
            test_labels = test_labels_a[:-1]
        elif failures_b and not failures_a:
            print("***** Problem found in second half. Bisecting again...")
            iteration = iteration + 1
            test_labels = test_labels_b[:-1]
        elif failures_a and failures_b:
            print("***** Multiple sources of failure found")
            break
        else:
            print("***** No source of failure found... try pair execution (--pair)")
            break

    if len(test_labels) == 1:
        print("***** Source of error: %s" % test_labels[0])
    teardown(state)
|
|
|
|
|
|
def paired_tests(paired_test, options, test_labels, parallel):
    """
    Run every test label together with paired_test, one pair per subprocess,
    and report the first label whose pairing causes a failure.
    """
    state = setup(options.verbosity, test_labels, parallel)

    if not test_labels:
        test_labels = get_installed()

    print('***** Trying paired execution')

    # Make sure the constant member of the pair isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in (paired_test, 'model_inheritance_same_model_name'):
        try:
            test_labels.remove(label)
        except ValueError:
            pass

    subprocess_args = get_subprocess_args(options)

    total = len(test_labels)
    for position, label in enumerate(test_labels, start=1):
        print('***** %d of %d: Check test pairing with %s' % (
            position, total, label))
        if subprocess.call(subprocess_args + [label, paired_test]):
            print('***** Found problem pair with %s' % label)
            return

    print('***** No problem pair found')
    teardown(state)
|
|
|
|
|
|
if __name__ == "__main__":
    # Command-line entry point: parse options, validate dependencies and
    # environment, then dispatch to bisect / paired / normal test execution.
    parser = argparse.ArgumentParser(description="Run the Django test suite.")
    parser.add_argument(
        'modules', nargs='*', metavar='module',
        help='Optional path(s) to test modules; e.g. "i18n" or '
             '"i18n.tests.TranslationTests.test_lazy_objects".',
    )
    parser.add_argument(
        '-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output',
    )
    parser.add_argument(
        '--noinput', action='store_false', dest='interactive', default=True,
        help='Tells Django to NOT prompt the user for input of any kind.',
    )
    parser.add_argument(
        '--failfast', action='store_true', dest='failfast', default=False,
        help='Tells Django to stop running the test suite after first failed test.',
    )
    parser.add_argument(
        '-k', '--keepdb', action='store_true', dest='keepdb', default=False,
        help='Tells Django to preserve the test database between runs.',
    )
    parser.add_argument(
        '--settings',
        help='Python path to settings module, e.g. "myproject.settings". If '
             'this isn\'t provided, either the DJANGO_SETTINGS_MODULE '
             'environment variable or "test_sqlite" will be used.',
    )
    parser.add_argument(
        '--bisect',
        help='Bisect the test suite to discover a test that causes a test '
             'failure when combined with the named test.',
    )
    parser.add_argument(
        '--pair',
        help='Run the test suite in pairs with the named test to find problem pairs.',
    )
    parser.add_argument(
        '--reverse', action='store_true', default=False,
        help='Sort test suites and test cases in opposite order to debug '
             'test side effects not apparent with normal execution lineup.',
    )
    parser.add_argument(
        '--liveserver',
        help='Overrides the default address where the live server (used with '
             'LiveServerTestCase) is expected to run from. The default value '
             'is localhost:8081-8179.',
    )
    parser.add_argument(
        '--selenium', dest='selenium', action=ActionSelenium, metavar='BROWSERS',
        help='A comma-separated list of browsers to run the Selenium tests against.',
    )
    parser.add_argument(
        '--debug-sql', action='store_true', dest='debug_sql', default=False,
        help='Turn on the SQL query logger within tests.',
    )
    parser.add_argument(
        '--parallel', dest='parallel', nargs='?', default=0, type=int,
        const=default_test_processes(), metavar='N',
        help='Run tests using up to N parallel processes.',
    )
    parser.add_argument(
        '--tag', dest='tags', action='append',
        help='Run only tests with the specified tags. Can be used multiple times.',
    )
    parser.add_argument(
        '--exclude-tag', dest='exclude_tags', action='append',
        help='Do not run tests with the specified tag. Can be used multiple times.',
    )

    options = parser.parse_args()

    # mock is a required dependency
    try:
        from django.test import mock  # NOQA
    except ImportError:
        print(
            "Please install test dependencies first: \n"
            "$ pip install -r requirements/py%s.txt" % sys.version_info.major
        )
        sys.exit(1)

    # Allow including a trailing slash on app_labels for tab completion convenience
    options.modules = [os.path.normpath(labels) for labels in options.modules]

    # Resolve the settings module: explicit flag wins, then the environment,
    # then the bundled "test_sqlite" settings.
    if options.settings:
        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
    else:
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'test_sqlite'
        options.settings = os.environ['DJANGO_SETTINGS_MODULE']

    if options.liveserver is not None:
        os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options.liveserver

    # --selenium implies the 'selenium' tag so only those tests run.
    if options.selenium:
        if not options.tags:
            options.tags = ['selenium']
        elif 'selenium' not in options.tags:
            options.tags.append('selenium')
        SeleniumTestCaseBase.browsers = options.selenium

    if options.bisect:
        bisect_tests(options.bisect, options, options.modules, options.parallel)
    elif options.pair:
        paired_tests(options.pair, options, options.modules, options.parallel)
    else:
        failures = django_tests(
            options.verbosity, options.interactive, options.failfast,
            options.keepdb, options.reverse, options.modules,
            options.debug_sql, options.parallel, options.tags,
            options.exclude_tags,
        )
        # Non-zero exit status signals failure to CI / calling scripts.
        if failures:
            sys.exit(bool(failures))
|