Ajoutez des fichiers projet.
This commit is contained in:
425
venv/Lib/site-packages/django/core/management/__init__.py
Normal file
425
venv/Lib/site-packages/django/core/management/__init__.py
Normal file
@@ -0,0 +1,425 @@
|
||||
import functools
|
||||
import os
|
||||
import pkgutil
|
||||
import sys
|
||||
from argparse import (
|
||||
_AppendConstAction, _CountAction, _StoreConstAction, _SubParsersAction,
|
||||
)
|
||||
from collections import defaultdict
|
||||
from difflib import get_close_matches
|
||||
from importlib import import_module
|
||||
|
||||
import django
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management.base import (
|
||||
BaseCommand, CommandError, CommandParser, handle_default_options,
|
||||
)
|
||||
from django.core.management.color import color_style
|
||||
from django.utils import autoreload
|
||||
|
||||
|
||||
def find_commands(management_dir):
    """
    Return the names of all commands available under *management_dir*.

    A command is any non-package module inside the ``commands``
    subdirectory whose name doesn't start with an underscore.
    """
    commands_path = os.path.join(management_dir, 'commands')
    names = []
    for _, module_name, is_package in pkgutil.iter_modules([commands_path]):
        if not is_package and not module_name.startswith('_'):
            names.append(module_name)
    return names
|
||||
|
||||
|
||||
def load_command_class(app_name, name):
    """
    Import ``<app_name>.management.commands.<name>`` and return an instance
    of its ``Command`` class.

    Errors raised while importing (ImportError, AttributeError) are allowed
    to propagate to the caller.
    """
    module_path = '%s.management.commands.%s' % (app_name, name)
    command_module = import_module(module_path)
    return command_module.Command()
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def get_commands():
    """
    Return a dictionary mapping command names to their callback applications.

    Commands are discovered in django.core's management.commands package and
    in each installed application's management.commands package. Core
    commands are always included; user-defined commands are added only once
    a settings module has been configured.

    The mapping has the form {command_name: app_name}; each pair can be fed
    to load_command_class(app_name, command_name). A specific command
    instance may be stored in place of the app name to force that version.

    The result is memoized by lru_cache, so discovery runs only once.
    """
    # Core commands come first; applications may override them below.
    command_map = {}
    for command_name in find_commands(__path__[0]):
        command_map[command_name] = 'django.core'

    if not settings.configured:
        return command_map

    # Iterate in reverse so that apps earlier in INSTALLED_APPS win when
    # two apps define a command with the same name.
    app_configs = list(apps.get_app_configs())
    for app_config in reversed(app_configs):
        management_path = os.path.join(app_config.path, 'management')
        for command_name in find_commands(management_path):
            command_map[command_name] = app_config.name

    return command_map
|
||||
|
||||
|
||||
def call_command(command_name, *args, **options):
    """
    Call the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    `command_name` may be a string or a command object. Using a string is
    preferred unless the command object is required for further processing or
    testing.

    Some examples:
        call_command('migrate')
        call_command('shell', plain=True)
        call_command('sqlmigrate', 'myapp')

        from django.core.management.commands import flush
        cmd = flush.Command()
        call_command(cmd, verbosity=0, interactive=False)
        # Do something with cmd ...
    """
    if isinstance(command_name, BaseCommand):
        # Command object passed in.
        command = command_name
        command_name = command.__class__.__module__.split('.')[-1]
    else:
        # Load the command object by name.
        try:
            app_name = get_commands()[command_name]
        except KeyError:
            raise CommandError("Unknown command: %r" % command_name)

        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            command = app_name
        else:
            command = load_command_class(app_name, command_name)

    # Simulate argument parsing to get the option defaults (see #10080 for details).
    parser = command.create_parser('', command_name)
    # Use the `dest` option name from the parser option.
    # Maps e.g. 'no_color' (the long option string, dashes normalized to
    # underscores) to the argparse dest actually stored on the namespace.
    opt_mapping = {
        min(s_opt.option_strings).lstrip('-').replace('-', '_'): s_opt.dest
        for s_opt in parser._actions if s_opt.option_strings
    }
    arg_options = {opt_mapping.get(key, key): value for key, value in options.items()}
    # Stringify positional args; lists/tuples are flattened into several argv
    # entries, mirroring how they would appear on a real command line.
    parse_args = []
    for arg in args:
        if isinstance(arg, (list, tuple)):
            parse_args += map(str, arg)
        else:
            parse_args.append(str(arg))

    def get_actions(parser):
        # Parser actions and actions from sub-parser choices (recursive).
        for opt in parser._actions:
            if isinstance(opt, _SubParsersAction):
                for sub_opt in opt.choices.values():
                    yield from get_actions(sub_opt)
            else:
                yield opt

    parser_actions = list(get_actions(parser))
    mutually_exclusive_required_options = {
        opt
        for group in parser._mutually_exclusive_groups
        for opt in group._group_actions if group.required
    }
    # Any required arguments which are passed in via **options must be passed
    # to parse_args(), otherwise argparse would reject the simulated command
    # line as incomplete.
    for opt in parser_actions:
        if (
            opt.dest in options and
            (opt.required or opt in mutually_exclusive_required_options)
        ):
            # A dest shared by several option strings is ambiguous when
            # addressed via **options.
            opt_dest_count = sum(v == opt.dest for v in opt_mapping.values())
            if opt_dest_count > 1:
                raise TypeError(
                    f'Cannot pass the dest {opt.dest!r} that matches multiple '
                    f'arguments via **options.'
                )
            parse_args.append(min(opt.option_strings))
            # Const/count actions take no value on the command line.
            if isinstance(opt, (_AppendConstAction, _CountAction, _StoreConstAction)):
                continue
            value = arg_options[opt.dest]
            if isinstance(value, (list, tuple)):
                parse_args += map(str, value)
            else:
                parse_args.append(str(value))
    defaults = parser.parse_args(args=parse_args)
    # Caller-supplied options override the parsed defaults.
    defaults = dict(defaults._get_kwargs(), **arg_options)
    # Raise an error if any unknown options were passed.
    stealth_options = set(command.base_stealth_options + command.stealth_options)
    dest_parameters = {action.dest for action in parser_actions}
    valid_options = (dest_parameters | stealth_options).union(opt_mapping)
    unknown_options = set(options) - valid_options
    if unknown_options:
        raise TypeError(
            "Unknown option(s) for %s command: %s. "
            "Valid options are: %s." % (
                command_name,
                ', '.join(sorted(unknown_options)),
                ', '.join(sorted(valid_options)),
            )
        )
    # Move positional args out of options to mimic legacy optparse
    args = defaults.pop('args', ())
    # System checks are skipped by default for programmatic calls; pass
    # skip_checks=False explicitly to run them.
    if 'skip_checks' not in options:
        defaults['skip_checks'] = True

    return command.execute(*args, **defaults)
|
||||
|
||||
|
||||
class ManagementUtility:
    """
    Encapsulate the logic of the django-admin and manage.py utilities.
    """
    def __init__(self, argv=None):
        # Copy sys.argv so execute() can safely mutate self.argv later.
        self.argv = argv or sys.argv[:]
        self.prog_name = os.path.basename(self.argv[0])
        if self.prog_name == '__main__.py':
            # Invoked as `python -m django`; show a friendlier program name.
            self.prog_name = 'python -m django'
        # Set by execute() when settings fail to import/configure.
        self.settings_exception = None

    def main_help_text(self, commands_only=False):
        """Return the script's main help text, as a string."""
        if commands_only:
            usage = sorted(get_commands())
        else:
            usage = [
                "",
                "Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
                "",
                "Available subcommands:",
            ]
            # Group command names by the app that provides them.
            commands_dict = defaultdict(lambda: [])
            for name, app in get_commands().items():
                if app == 'django.core':
                    app = 'django'
                else:
                    app = app.rpartition('.')[-1]
                commands_dict[app].append(name)
            style = color_style()
            for app in sorted(commands_dict):
                usage.append("")
                usage.append(style.NOTICE("[%s]" % app))
                for name in sorted(commands_dict[app]):
                    usage.append(" %s" % name)
            # Output an extra note if settings are not properly configured
            if self.settings_exception is not None:
                usage.append(style.NOTICE(
                    "Note that only Django core commands are listed "
                    "as settings are not properly configured (error: %s)."
                    % self.settings_exception))

        return '\n'.join(usage)

    def fetch_command(self, subcommand):
        """
        Try to fetch the given subcommand, printing a message with the
        appropriate command called from the command line (usually
        "django-admin" or "manage.py") if it can't be found.
        """
        # Get commands outside of try block to prevent swallowing exceptions
        commands = get_commands()
        try:
            app_name = commands[subcommand]
        except KeyError:
            if os.environ.get('DJANGO_SETTINGS_MODULE'):
                # If `subcommand` is missing due to misconfigured settings, the
                # following line will retrigger an ImproperlyConfigured exception
                # (get_commands() swallows the original one) so the user is
                # informed about it.
                settings.INSTALLED_APPS
            elif not settings.configured:
                sys.stderr.write("No Django settings specified.\n")
            # Suggest close matches for likely typos before exiting.
            possible_matches = get_close_matches(subcommand, commands)
            sys.stderr.write('Unknown command: %r' % subcommand)
            if possible_matches:
                sys.stderr.write('. Did you mean %s?' % possible_matches[0])
            sys.stderr.write("\nType '%s help' for usage.\n" % self.prog_name)
            sys.exit(1)
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            klass = load_command_class(app_name, subcommand)
        return klass

    def autocomplete(self):
        """
        Output completion suggestions for BASH.

        The output of this function is passed to BASH's `COMREPLY` variable and
        treated as completion suggestions. `COMREPLY` expects a space
        separated string as the result.

        The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
        to get information about the cli input. Please refer to the BASH
        man-page for more information about this variables.

        Subcommand options are saved as pairs. A pair consists of
        the long option string (e.g. '--exclude') and a boolean
        value indicating if the option requires arguments. When printing to
        stdout, an equal sign is appended to options which require arguments.

        Note: If debugging this function, it is recommended to write the debug
        output in a separate file. Otherwise the debug output will be treated
        and formatted as potential completion suggestions.
        """
        # Don't complete if user hasn't sourced bash_completion file.
        if 'DJANGO_AUTO_COMPLETE' not in os.environ:
            return

        cwords = os.environ['COMP_WORDS'].split()[1:]
        cword = int(os.environ['COMP_CWORD'])

        try:
            curr = cwords[cword - 1]
        except IndexError:
            curr = ''

        subcommands = [*get_commands(), 'help']
        options = [('--help', False)]

        # subcommand
        if cword == 1:
            print(' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
        # subcommand options
        # special case: the 'help' subcommand has no options
        elif cwords[0] in subcommands and cwords[0] != 'help':
            subcommand_cls = self.fetch_command(cwords[0])
            # special case: add the names of installed apps to options
            if cwords[0] in ('dumpdata', 'sqlmigrate', 'sqlsequencereset', 'test'):
                try:
                    app_configs = apps.get_app_configs()
                    # Get the last part of the dotted path as the app name.
                    options.extend((app_config.label, 0) for app_config in app_configs)
                except ImportError:
                    # Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
                    # user will find out once they execute the command.
                    pass
            parser = subcommand_cls.create_parser('', cwords[0])
            options.extend(
                (min(s_opt.option_strings), s_opt.nargs != 0)
                for s_opt in parser._actions if s_opt.option_strings
            )
            # filter out previously specified options from available options
            prev_opts = {x.split('=')[0] for x in cwords[1:cword - 1]}
            options = (opt for opt in options if opt[0] not in prev_opts)

            # filter options by current input
            options = sorted((k, v) for k, v in options if k.startswith(curr))
            for opt_label, require_arg in options:
                # append '=' to options which require args
                if require_arg:
                    opt_label += '='
                print(opt_label)
        # Exit code of the bash completion function is never passed back to
        # the user, so it's safe to always exit with 0.
        # For more details see #25420.
        sys.exit(0)

    def execute(self):
        """
        Given the command-line arguments, figure out which subcommand is being
        run, create a parser appropriate to that command, and run it.
        """
        try:
            subcommand = self.argv[1]
        except IndexError:
            subcommand = 'help'  # Display help if no arguments were given.

        # Preprocess options to extract --settings and --pythonpath.
        # These options could affect the commands that are available, so they
        # must be processed early.
        parser = CommandParser(
            prog=self.prog_name,
            usage='%(prog)s subcommand [options] [args]',
            add_help=False,
            allow_abbrev=False,
        )
        parser.add_argument('--settings')
        parser.add_argument('--pythonpath')
        parser.add_argument('args', nargs='*')  # catch-all
        try:
            options, args = parser.parse_known_args(self.argv[2:])
            handle_default_options(options)
        except CommandError:
            pass  # Ignore any option errors at this point.

        try:
            settings.INSTALLED_APPS
        except ImproperlyConfigured as exc:
            self.settings_exception = exc
        except ImportError as exc:
            self.settings_exception = exc

        if settings.configured:
            # Start the auto-reloading dev server even if the code is broken.
            # The hardcoded condition is a code smell but we can't rely on a
            # flag on the command class because we haven't located it yet.
            if subcommand == 'runserver' and '--noreload' not in self.argv:
                try:
                    autoreload.check_errors(django.setup)()
                except Exception:
                    # The exception will be raised later in the child process
                    # started by the autoreloader. Pretend it didn't happen by
                    # loading an empty list of applications.
                    apps.all_models = defaultdict(dict)
                    apps.app_configs = {}
                    apps.apps_ready = apps.models_ready = apps.ready = True

                    # Remove options not compatible with the built-in runserver
                    # (e.g. options for the contrib.staticfiles' runserver).
                    # Changes here require manually testing as described in
                    # #27522.
                    _parser = self.fetch_command('runserver').create_parser('django', 'runserver')
                    _options, _args = _parser.parse_known_args(self.argv[2:])
                    for _arg in _args:
                        self.argv.remove(_arg)

            # In all other cases, django.setup() is required to succeed.
        else:
            django.setup()

        self.autocomplete()

        if subcommand == 'help':
            if '--commands' in args:
                sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
            elif not options.args:
                sys.stdout.write(self.main_help_text() + '\n')
            else:
                self.fetch_command(options.args[0]).print_help(self.prog_name, options.args[0])
        # Special-cases: We want 'django-admin --version' and
        # 'django-admin --help' to work, for backwards compatibility.
        elif subcommand == 'version' or self.argv[1:] == ['--version']:
            sys.stdout.write(django.get_version() + '\n')
        elif self.argv[1:] in (['--help'], ['-h']):
            sys.stdout.write(self.main_help_text() + '\n')
        else:
            self.fetch_command(subcommand).run_from_argv(self.argv)
|
||||
|
||||
|
||||
def execute_from_command_line(argv=None):
    """Instantiate a ManagementUtility with *argv* and run it."""
    ManagementUtility(argv).execute()
|
600
venv/Lib/site-packages/django/core/management/base.py
Normal file
600
venv/Lib/site-packages/django/core/management/base.py
Normal file
@@ -0,0 +1,600 @@
|
||||
"""
|
||||
Base classes for writing management commands (named commands which can
|
||||
be executed through ``django-admin`` or ``manage.py``).
|
||||
"""
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from argparse import ArgumentParser, HelpFormatter
|
||||
from io import TextIOBase
|
||||
|
||||
import django
|
||||
from django.core import checks
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management.color import color_style, no_style
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
from django.utils.deprecation import RemovedInDjango41Warning
|
||||
|
||||
# Sentinel for requires_system_checks meaning "run every registered check".
ALL_CHECKS = '__all__'
|
||||
|
||||
|
||||
class CommandError(Exception):
    """
    Indicate a problem while executing a management command.

    When raised during the execution of a management command, this exception
    is caught and turned into a nicely-printed error message on the
    appropriate output stream (stderr); raising it (with a sensible
    description of the error) is therefore the preferred way to signal that
    something has gone wrong while running a command.
    """
    def __init__(self, *args, returncode=1, **kwargs):
        super().__init__(*args, **kwargs)
        # Process exit status used when the command was started from the
        # command line.
        self.returncode = returncode
|
||||
|
||||
|
||||
class SystemCheckError(CommandError):
    """Raised when the system check framework detects unrecoverable errors."""
|
||||
|
||||
|
||||
class CommandParser(ArgumentParser):
    """
    ArgumentParser subclass used by management commands.

    It improves a few error messages and, when a command is called
    programmatically rather than from the command line, raises CommandError
    instead of letting argparse terminate the process with SystemExit.
    """
    def __init__(self, *, missing_args_message=None, called_from_command_line=None, **kwargs):
        self.missing_args_message = missing_args_message
        self.called_from_command_line = called_from_command_line
        super().__init__(**kwargs)

    def parse_args(self, args=None, namespace=None):
        # Produce a friendlier message than argparse's default when required
        # positional arguments were not supplied.
        if self.missing_args_message and not (args or any(not arg.startswith('-') for arg in args)):
            self.error(self.missing_args_message)
        return super().parse_args(args, namespace)

    def error(self, message):
        if not self.called_from_command_line:
            # Programmatic invocation: raise instead of exiting the process.
            raise CommandError("Error: %s" % message)
        # Command-line invocation: keep argparse's print-usage-and-exit.
        super().error(message)
|
||||
|
||||
|
||||
def handle_default_options(options):
    """
    Apply the options that every command accepts (--settings and
    --pythonpath) so that ManagementUtility can act on them before it starts
    searching for user commands.
    """
    settings_module = options.settings
    if settings_module:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
    extra_path = options.pythonpath
    if extra_path:
        # Prepend so the requested path wins over already-importable modules.
        sys.path.insert(0, extra_path)
|
||||
|
||||
|
||||
def no_translations(handle_func):
    """Decorator that forces a command to run with translations deactivated."""
    def inner(*args, **kwargs):
        # Imported lazily to avoid a circular import at module load time.
        from django.utils import translation
        previous_language = translation.get_language()
        translation.deactivate_all()
        try:
            result = handle_func(*args, **kwargs)
        finally:
            # Restore the caller's active language, if there was one.
            if previous_language is not None:
                translation.activate(previous_language)
        return result
    return inner
|
||||
|
||||
|
||||
class DjangoHelpFormatter(HelpFormatter):
    """
    Help formatter that lists command-specific arguments before the
    arguments common to all commands in --help output.
    """
    # Option strings shared by every command; pushed to the end of the help.
    show_last = {
        '--version', '--verbosity', '--traceback', '--settings', '--pythonpath',
        '--no-color', '--force-color', '--skip-checks',
    }

    def _reordered_actions(self, actions):
        # Stable sort: actions whose option strings intersect show_last move
        # to the end (True sorts after False); all others keep their order.
        return sorted(
            actions,
            key=lambda action: bool(set(action.option_strings) & self.show_last),
        )

    def add_usage(self, usage, actions, *args, **kwargs):
        super().add_usage(usage, self._reordered_actions(actions), *args, **kwargs)

    def add_arguments(self, actions):
        super().add_arguments(self._reordered_actions(actions))
|
||||
|
||||
|
||||
class OutputWrapper(TextIOBase):
    """Thin wrapper around a stdout/stderr stream that applies styling."""

    def __init__(self, out, ending='\n'):
        self._out = out
        # Trigger the setter: installs the identity function when None or
        # when the wrapped stream isn't an interactive terminal.
        self.style_func = None
        self.ending = ending

    @property
    def style_func(self):
        return self._style_func

    @style_func.setter
    def style_func(self, style_func):
        # Only colorize when writing to an interactive terminal.
        self._style_func = style_func if style_func and self.isatty() else (lambda x: x)

    def __getattr__(self, name):
        # Delegate any other attribute access to the wrapped stream.
        return getattr(self._out, name)

    def flush(self):
        if hasattr(self._out, 'flush'):
            self._out.flush()

    def isatty(self):
        return hasattr(self._out, 'isatty') and self._out.isatty()

    def write(self, msg='', style_func=None, ending=None):
        line_ending = self.ending if ending is None else ending
        if line_ending and not msg.endswith(line_ending):
            msg += line_ending
        apply_style = style_func or self.style_func
        self._out.write(apply_style(msg))
|
||||
|
||||
|
||||
class BaseCommand:
    """
    The base class from which all management commands ultimately
    derive.

    Use this class if you want access to all of the mechanisms which
    parse the command-line arguments and work out what code to call in
    response; if you don't need to change any of that behavior,
    consider using one of the subclasses defined in this file.

    If you are interested in overriding/customizing various aspects of
    the command-parsing and -execution behavior, the normal flow works
    as follows:

    1. ``django-admin`` or ``manage.py`` loads the command class
       and calls its ``run_from_argv()`` method.

    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
       an ``ArgumentParser`` for the arguments, parses them, performs
       any environment changes requested by options like
       ``pythonpath``, and then calls the ``execute()`` method,
       passing the parsed arguments.

    3. The ``execute()`` method attempts to carry out the command by
       calling the ``handle()`` method with the parsed arguments; any
       output produced by ``handle()`` will be printed to standard
       output and, if the command is intended to produce a block of
       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.

    4. If ``handle()`` or ``execute()`` raised any exception (e.g.
       ``CommandError``), ``run_from_argv()`` will instead print an error
       message to ``stderr``.

    Thus, the ``handle()`` method is typically the starting point for
    subclasses; many built-in commands and command types either place
    all of their logic in ``handle()``, or perform some additional
    parsing work in ``handle()`` and then delegate from it to more
    specialized methods as needed.

    Several attributes affect behavior at various steps along the way:

    ``help``
        A short description of the command, which will be printed in
        help messages.

    ``output_transaction``
        A boolean indicating whether the command outputs SQL
        statements; if ``True``, the output will automatically be
        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
        ``False``.

    ``requires_migrations_checks``
        A boolean; if ``True``, the command prints a warning if the set of
        migrations on disk don't match the migrations in the database.

    ``requires_system_checks``
        A list or tuple of tags, e.g. [Tags.staticfiles, Tags.models]. System
        checks registered in the chosen tags will be checked for errors prior
        to executing the command. The value '__all__' can be used to specify
        that all system checks should be performed. Default value is '__all__'.

        To validate an individual application's models
        rather than all applications' models, call
        ``self.check(app_configs)`` from ``handle()``, where ``app_configs``
        is the list of application's configuration provided by the
        app registry.

    ``stealth_options``
        A tuple of any options the command uses which aren't defined by the
        argument parser.
    """
    # Metadata about this command.
    help = ''

    # Configuration shortcuts that alter various logic.
    _called_from_command_line = False
    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
    requires_migrations_checks = False
    requires_system_checks = '__all__'
    # Arguments, common to all commands, which aren't defined by the argument
    # parser.
    base_stealth_options = ('stderr', 'stdout')
    # Command-specific options not defined by the argument parser.
    stealth_options = ()
    # Option strings of base arguments (e.g. '--verbosity') whose help text
    # should be hidden from --help output via argparse.SUPPRESS.
    suppressed_base_arguments = set()
|
||||
|
||||
    def __init__(self, stdout=None, stderr=None, no_color=False, force_color=False):
        """
        Wrap the output streams and resolve the color style.

        ``stdout``/``stderr`` default to the process streams; ``no_color``
        and ``force_color`` are mutually exclusive. Also normalizes a legacy
        boolean ``requires_system_checks`` (deprecated) to '__all__' or [].
        """
        self.stdout = OutputWrapper(stdout or sys.stdout)
        self.stderr = OutputWrapper(stderr or sys.stderr)
        if no_color and force_color:
            raise CommandError("'no_color' and 'force_color' can't be used together.")
        if no_color:
            self.style = no_style()
        else:
            self.style = color_style(force_color)
            # Errors printed to stderr are styled as errors.
            self.stderr.style_func = self.style.ERROR
        if self.requires_system_checks in [False, True]:
            # Backwards-compatibility shim: booleans were deprecated in
            # favor of '__all__' / [] and will be removed in Django 4.1.
            warnings.warn(
                "Using a boolean value for requires_system_checks is "
                "deprecated. Use '__all__' instead of True, and [] (an empty "
                "list) instead of False.",
                RemovedInDjango41Warning,
            )
            self.requires_system_checks = ALL_CHECKS if self.requires_system_checks else []
        if (
            not isinstance(self.requires_system_checks, (list, tuple)) and
            self.requires_system_checks != ALL_CHECKS
        ):
            raise TypeError('requires_system_checks must be a list or tuple.')
|
||||
|
||||
def get_version(self):
|
||||
"""
|
||||
Return the Django version, which should be correct for all built-in
|
||||
Django commands. User-supplied commands can override this method to
|
||||
return their own version.
|
||||
"""
|
||||
return django.get_version()
|
||||
|
||||
    def create_parser(self, prog_name, subcommand, **kwargs):
        """
        Create and return the ``ArgumentParser`` which will be used to
        parse the arguments to this command.
        """
        # CommandParser raises CommandError instead of exiting when the
        # command is invoked programmatically; DjangoHelpFormatter pushes the
        # base options to the end of --help output.
        parser = CommandParser(
            prog='%s %s' % (os.path.basename(prog_name), subcommand),
            description=self.help or None,
            formatter_class=DjangoHelpFormatter,
            missing_args_message=getattr(self, 'missing_args_message', None),
            called_from_command_line=getattr(self, '_called_from_command_line', None),
            **kwargs
        )
        # Base arguments are added via add_base_argument() so subclasses can
        # hide them through suppressed_base_arguments.
        self.add_base_argument(
            parser, '--version', action='version', version=self.get_version(),
            help="Show program's version number and exit.",
        )
        self.add_base_argument(
            parser, '-v', '--verbosity', default=1,
            type=int, choices=[0, 1, 2, 3],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output',
        )
        self.add_base_argument(
            parser, '--settings',
            help=(
                'The Python path to a settings module, e.g. '
                '"myproject.settings.main". If this isn\'t provided, the '
                'DJANGO_SETTINGS_MODULE environment variable will be used.'
            ),
        )
        self.add_base_argument(
            parser, '--pythonpath',
            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".',
        )
        self.add_base_argument(
            parser, '--traceback', action='store_true',
            help='Raise on CommandError exceptions.',
        )
        self.add_base_argument(
            parser, '--no-color', action='store_true',
            help="Don't colorize the command output.",
        )
        self.add_base_argument(
            parser, '--force-color', action='store_true',
            help='Force colorization of the command output.',
        )
        # --skip-checks only makes sense for commands that run system checks.
        if self.requires_system_checks:
            parser.add_argument(
                '--skip-checks', action='store_true',
                help='Skip system checks.',
            )
        self.add_arguments(parser)
        return parser
|
||||
|
||||
def add_arguments(self, parser):
|
||||
"""
|
||||
Entry point for subclassed commands to add custom arguments.
|
||||
"""
|
||||
pass
|
||||
|
||||
def add_base_argument(self, parser, *args, **kwargs):
|
||||
"""
|
||||
Call the parser's add_argument() method, suppressing the help text
|
||||
according to BaseCommand.suppressed_base_arguments.
|
||||
"""
|
||||
for arg in args:
|
||||
if arg in self.suppressed_base_arguments:
|
||||
kwargs['help'] = argparse.SUPPRESS
|
||||
break
|
||||
parser.add_argument(*args, **kwargs)
|
||||
|
||||
def print_help(self, prog_name, subcommand):
|
||||
"""
|
||||
Print the help message for this command, derived from
|
||||
``self.usage()``.
|
||||
"""
|
||||
parser = self.create_parser(prog_name, subcommand)
|
||||
parser.print_help()
|
||||
|
||||
    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr. If the ``--traceback`` option is present or the raised
        ``Exception`` is not ``CommandError``, raise it.
        """
        # Lets create_parser()/CommandParser know we're a real CLI invocation,
        # so argparse errors exit rather than raise CommandError.
        self._called_from_command_line = True
        parser = self.create_parser(argv[0], argv[1])

        options = parser.parse_args(argv[2:])
        cmd_options = vars(options)
        # Move positional args out of options to mimic legacy optparse
        args = cmd_options.pop('args', ())
        handle_default_options(options)
        try:
            self.execute(*args, **cmd_options)
        except CommandError as e:
            if options.traceback:
                raise

            # SystemCheckError takes care of its own formatting.
            if isinstance(e, SystemCheckError):
                self.stderr.write(str(e), lambda x: x)
            else:
                self.stderr.write('%s: %s' % (e.__class__.__name__, e))
            # Propagate the command's chosen exit status to the shell.
            sys.exit(e.returncode)
        finally:
            try:
                connections.close_all()
            except ImproperlyConfigured:
                # Ignore if connections aren't setup at this point (e.g. no
                # configured settings).
                pass
|
||||
|
||||
def execute(self, *args, **options):
    """
    Try to execute this command, performing system checks if needed (as
    controlled by the ``requires_system_checks`` attribute, except if
    force-skipped).
    """
    force_color = options['force_color']
    no_color = options['no_color']
    if force_color and no_color:
        raise CommandError("The --no-color and --force-color options can't be used together.")
    if force_color:
        self.style = color_style(force_color=True)
    elif no_color:
        self.style = no_style()
        self.stderr.style_func = None
    # Allow callers to redirect output through explicit stream options.
    if options.get('stdout'):
        self.stdout = OutputWrapper(options['stdout'])
    if options.get('stderr'):
        self.stderr = OutputWrapper(options['stderr'])

    if self.requires_system_checks and not options['skip_checks']:
        # Run every check, or only the tags this command declared.
        check_kwargs = (
            {} if self.requires_system_checks == ALL_CHECKS
            else {'tags': self.requires_system_checks}
        )
        self.check(**check_kwargs)
    if self.requires_migrations_checks:
        self.check_migrations()
    output = self.handle(*args, **options)
    if output:
        if self.output_transaction:
            # Wrap the emitted SQL in BEGIN/COMMIT for the target database.
            connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
            output = '%s\n%s\n%s' % (
                self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()),
                output,
                self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()),
            )
        self.stdout.write(output)
    return output
|
||||
|
||||
def check(self, app_configs=None, tags=None, display_num_errors=False,
          include_deployment_checks=False, fail_level=checks.ERROR,
          databases=None):
    """
    Use the system check framework to validate entire Django project.
    Raise CommandError for any serious message (error or critical errors).
    If there are only light messages (like warnings), print them to stderr
    and don't raise an exception.
    """
    all_issues = checks.run_checks(
        app_configs=app_configs,
        tags=tags,
        include_deployment_checks=include_deployment_checks,
        databases=databases,
    )

    header, body, footer = "", "", ""
    visible_issue_count = 0  # excludes silenced warnings

    if all_issues:
        # Bucket messages by severity band; silenced messages are excluded
        # from every bucket (they only affect the final silenced count).
        debugs = [e for e in all_issues if e.level < checks.INFO and not e.is_silenced()]
        infos = [e for e in all_issues if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()]
        warnings = [e for e in all_issues if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()]
        errors = [e for e in all_issues if checks.ERROR <= e.level < checks.CRITICAL and not e.is_silenced()]
        criticals = [e for e in all_issues if checks.CRITICAL <= e.level and not e.is_silenced()]
        sorted_issues = [
            (criticals, 'CRITICALS'),
            (errors, 'ERRORS'),
            (warnings, 'WARNINGS'),
            (infos, 'INFOS'),
            (debugs, 'DEBUGS'),
        ]

        for issues, group_name in sorted_issues:
            if issues:
                visible_issue_count += len(issues)
                # Style serious issues as errors and the rest as warnings,
                # then sort the rendered strings alphabetically per group.
                formatted = (
                    self.style.ERROR(str(e))
                    if e.is_serious()
                    else self.style.WARNING(str(e))
                    for e in issues)
                formatted = "\n".join(sorted(formatted))
                body += '\n%s:\n%s\n' % (group_name, formatted)

    if visible_issue_count:
        header = "System check identified some issues:\n"

    if display_num_errors:
        if visible_issue_count:
            footer += '\n'
        footer += "System check identified %s (%s silenced)." % (
            "no issues" if visible_issue_count == 0 else
            "1 issue" if visible_issue_count == 1 else
            "%s issues" % visible_issue_count,
            len(all_issues) - visible_issue_count,
        )

    # Any non-silenced message at or above fail_level aborts the command.
    if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues):
        msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
        raise SystemCheckError(msg)
    else:
        msg = header + body + footer

    if msg:
        if visible_issue_count:
            # Identity style_func: the text above is already styled.
            self.stderr.write(msg, lambda x: x)
        else:
            self.stdout.write(msg)
|
||||
|
||||
def check_migrations(self):
    """
    Print a warning if the set of migrations on disk don't match the
    migrations in the database.
    """
    from django.db.migrations.executor import MigrationExecutor
    try:
        executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
    except ImproperlyConfigured:
        # No databases are configured (or the dummy one)
        return

    unapplied = executor.migration_plan(executor.loader.graph.leaf_nodes())
    if not unapplied:
        return
    # De-duplicate and sort the app labels that still have pending migrations.
    pending_apps = sorted({migration.app_label for migration, backwards in unapplied})
    notice = (
        "\nYou have %(unapplied_migration_count)s unapplied migration(s). "
        "Your project may not work properly until you apply the "
        "migrations for app(s): %(apps_waiting_migration)s." % {
            "unapplied_migration_count": len(unapplied),
            "apps_waiting_migration": ", ".join(pending_apps),
        }
    )
    self.stdout.write(self.style.NOTICE(notice))
    self.stdout.write(self.style.NOTICE("Run 'python manage.py migrate' to apply them."))
|
||||
|
||||
def handle(self, *args, **options):
    """
    The actual logic of the command; concrete subclasses must override
    this method with their real work.
    """
    raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
|
||||
|
||||
|
||||
class AppCommand(BaseCommand):
    """
    A management command which takes one or more installed application labels
    as arguments, and does something with each of them.

    Rather than implementing ``handle()``, subclasses must implement
    ``handle_app_config()``, which will be called once for each application.
    """
    missing_args_message = "Enter at least one application label."

    def add_arguments(self, parser):
        # One or more positional app labels.
        parser.add_argument('args', metavar='app_label', nargs='+', help='One or more application label.')

    def handle(self, *app_labels, **options):
        """Resolve each label to an AppConfig and delegate to
        handle_app_config(), joining the non-empty results with newlines."""
        from django.apps import apps
        try:
            app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
        except (LookupError, ImportError) as e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
        output = []
        for app_config in app_configs:
            app_output = self.handle_app_config(app_config, **options)
            if app_output:
                output.append(app_output)
        return '\n'.join(output)

    def handle_app_config(self, app_config, **options):
        """
        Perform the command's actions for app_config, an AppConfig instance
        corresponding to an application label given on the command line.
        """
        # Fixed: the adjacent string literals previously lacked a separating
        # space, producing "...must providea handle_app_config() method.".
        raise NotImplementedError(
            "Subclasses of AppCommand must provide "
            "a handle_app_config() method.")
|
||||
|
||||
|
||||
class LabelCommand(BaseCommand):
    """
    A management command which takes one or more arbitrary arguments
    (labels) on the command line, and does something with each of
    them.

    Rather than implementing ``handle()``, subclasses must implement
    ``handle_label()``, which will be called once for each label.

    If the arguments should be names of installed applications, use
    ``AppCommand`` instead.
    """
    # Subclasses may override to change the noun used in messages/metavar.
    label = 'label'
    missing_args_message = "Enter at least one %s." % label

    def add_arguments(self, parser):
        parser.add_argument('args', metavar=self.label, nargs='+')

    def handle(self, *labels, **options):
        # Call handle_label() once per label, in order, and join the
        # non-empty results with newlines.
        results = [self.handle_label(label, **options) for label in labels]
        return '\n'.join(result for result in results if result)

    def handle_label(self, label, **options):
        """
        Perform the command's actions for ``label``, which will be the
        string as given on the command line.
        """
        raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method')
|
107
venv/Lib/site-packages/django/core/management/color.py
Normal file
107
venv/Lib/site-packages/django/core/management/color.py
Normal file
@@ -0,0 +1,107 @@
|
||||
"""
|
||||
Sets up the terminal color scheme.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import os
|
||||
import sys
|
||||
|
||||
from django.utils import termcolors
|
||||
|
||||
# Optional dependency: colorama translates ANSI escape sequences into Win32
# console calls, enabling colored output on legacy Windows terminals.
try:
    import colorama
    colorama.init()
except (ImportError, OSError):
    # OSError covers colorama.init() failing in unusual console setups.
    HAS_COLORAMA = False
else:
    HAS_COLORAMA = True
|
||||
|
||||
|
||||
def supports_color():
    """
    Return True if the running system's terminal supports color,
    and False otherwise.
    """
    def vt_codes_enabled_in_windows_registry():
        """
        Check the Windows Registry to see if VT code handling has been enabled
        by default, see https://superuser.com/a/1300251/447564.
        """
        try:
            # winreg is only available on Windows.
            import winreg
        except ImportError:
            return False
        reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, 'Console')
        try:
            reg_key_value, _ = winreg.QueryValueEx(reg_key, 'VirtualTerminalLevel')
        except FileNotFoundError:
            return False
        return reg_key_value == 1

    # isatty is not always implemented, #6223.
    if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
        return False
    # Any tty outside Windows is assumed capable.
    if sys.platform != 'win32':
        return True
    # On Windows, probe the known VT-capable environments; the registry
    # lookup stays last so it only runs when cheaper checks fail.
    return (
        HAS_COLORAMA or
        'ANSICON' in os.environ or
        # Windows Terminal supports VT codes.
        'WT_SESSION' in os.environ or
        # Microsoft Visual Studio Code's built-in terminal supports colors.
        os.environ.get('TERM_PROGRAM') == 'vscode' or
        vt_codes_enabled_in_windows_registry()
    )
|
||||
|
||||
|
||||
class Style:
    """Empty container; make_style() attaches one style function per
    palette role as instance attributes."""
    pass
|
||||
|
||||
|
||||
def make_style(config_string=''):
    """
    Create a Style object from the given config_string.

    If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used.
    """
    style = Style()
    color_settings = termcolors.parse_color_setting(config_string)

    def plain(text):
        # Identity styling used when no color configuration applies.
        return text

    # The nocolor palette lists every available role; iterate it so the
    # Style ends up with one callable attribute per role.
    for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
        if color_settings:
            style_func = termcolors.make_style(**color_settings.get(role, {}))
        else:
            style_func = plain
        setattr(style, role, style_func)

    # For backwards compatibility,
    # set style for ERROR_OUTPUT == ERROR
    style.ERROR_OUTPUT = style.ERROR

    return style
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def no_style():
    """
    Return a Style object with no color scheme.
    """
    # Cached: 'nocolor' always yields the same identity-style palette.
    return make_style('nocolor')
|
||||
|
||||
|
||||
def color_style(force_color=False):
    """
    Return a Style object from the Django color scheme.

    Falls back to the no-op style when the terminal lacks color support,
    unless force_color is set.
    """
    if force_color or supports_color():
        return make_style(os.environ.get('DJANGO_COLORS', ''))
    return no_style()
|
@@ -0,0 +1,70 @@
|
||||
from django.apps import apps
|
||||
from django.core import checks
|
||||
from django.core.checks.registry import registry
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = "Checks the entire Django project for potential problems."

    # This command runs the checks itself; don't pre-run any.
    requires_system_checks = []

    def add_arguments(self, parser):
        parser.add_argument('args', metavar='app_label', nargs='*')
        parser.add_argument(
            '--tag', '-t', action='append', dest='tags',
            help='Run only checks labeled with given tag.',
        )
        parser.add_argument(
            '--list-tags', action='store_true',
            help='List available tags.',
        )
        parser.add_argument(
            '--deploy', action='store_true',
            help='Check deployment settings.',
        )
        parser.add_argument(
            '--fail-level',
            default='ERROR',
            choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
            help=(
                'Message level that will cause the command to exit with a '
                'non-zero status. Default is ERROR.'
            ),
        )
        parser.add_argument(
            '--database', action='append', dest='databases',
            help='Run database related checks against these aliases.',
        )

    def handle(self, *app_labels, **options):
        deploy = options['deploy']
        if options['list_tags']:
            self.stdout.write('\n'.join(sorted(registry.tags_available(deploy))))
            return

        # Restrict the run to the named apps, or check everything.
        app_configs = (
            [apps.get_app_config(app_label) for app_label in app_labels]
            if app_labels else None
        )

        tags = options['tags']
        if tags:
            # Reject the first requested tag that no registered check declares.
            for tag in tags:
                if not checks.tag_exists(tag, deploy):
                    raise CommandError('There is no system check with the "%s" tag.' % tag)

        self.check(
            app_configs=app_configs,
            tags=tags,
            display_num_errors=True,
            include_deployment_checks=deploy,
            fail_level=getattr(checks, options['fail_level']),
            databases=options['databases'],
        )
|
@@ -0,0 +1,168 @@
|
||||
import codecs
|
||||
import concurrent.futures
|
||||
import glob
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.utils import (
|
||||
find_command, is_ignored_path, popen_wrapper,
|
||||
)
|
||||
|
||||
|
||||
def has_bom(fn):
    """Return True if the file at path *fn* starts with a UTF-8 or
    UTF-16 byte order mark."""
    boms = (codecs.BOM_UTF8, codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE)
    with fn.open('rb') as stream:
        head = stream.read(4)
    return head.startswith(boms)
|
||||
|
||||
|
||||
def is_writable(path):
    """Return True if *path* can be opened for appending.

    Known side effect: updating file access/modified time to current time if
    it is writable.
    """
    try:
        handle = open(path, 'a')
    except OSError:
        return False
    try:
        os.utime(path, None)
    except OSError:
        return False
    finally:
        handle.close()
    return True
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = 'Compiles .po files to .mo files for use with builtin gettext support.'

    # Usable without a configured settings module; skip system checks.
    requires_system_checks = []

    # External gettext binary used for compilation, and its default flags.
    program = 'msgfmt'
    program_options = ['--check-format']

    def add_arguments(self, parser):
        parser.add_argument(
            '--locale', '-l', action='append', default=[],
            help='Locale(s) to process (e.g. de_AT). Default is to process all. '
                 'Can be used multiple times.',
        )
        parser.add_argument(
            '--exclude', '-x', action='append', default=[],
            help='Locales to exclude. Default is none. Can be used multiple times.',
        )
        parser.add_argument(
            '--use-fuzzy', '-f', dest='fuzzy', action='store_true',
            help='Use fuzzy translations.',
        )
        parser.add_argument(
            '--ignore', '-i', action='append', dest='ignore_patterns',
            default=[], metavar='PATTERN',
            help='Ignore directories matching this glob-style pattern. '
                 'Use multiple times to ignore more.',
        )

    def handle(self, **options):
        """Locate locale directories and compile every selected .po file."""
        locale = options['locale']
        exclude = options['exclude']
        ignore_patterns = set(options['ignore_patterns'])
        self.verbosity = options['verbosity']
        if options['fuzzy']:
            # Rebind with + (not append) so the class-level default list is
            # not mutated for other instances.
            self.program_options = self.program_options + ['-f']

        if find_command(self.program) is None:
            raise CommandError("Can't find %s. Make sure you have GNU gettext "
                               "tools 0.15 or newer installed." % self.program)

        basedirs = [os.path.join('conf', 'locale'), 'locale']
        if os.environ.get('DJANGO_SETTINGS_MODULE'):
            from django.conf import settings
            basedirs.extend(settings.LOCALE_PATHS)

        # Walk entire tree, looking for locale directories
        for dirpath, dirnames, filenames in os.walk('.', topdown=True):
            for dirname in dirnames:
                # Pruning dirnames in place (topdown walk) skips ignored
                # subtrees entirely.
                if is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns):
                    dirnames.remove(dirname)
                elif dirname == 'locale':
                    basedirs.append(os.path.join(dirpath, dirname))

        # Gather existing directories.
        basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

        if not basedirs:
            raise CommandError("This script should be run from the Django Git "
                               "checkout or your project or app tree, or with "
                               "the settings module specified.")

        # Build locale list
        all_locales = []
        for basedir in basedirs:
            locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % basedir))
            all_locales.extend(map(os.path.basename, locale_dirs))

        # Account for excluded locales
        locales = locale or all_locales
        locales = set(locales).difference(exclude)

        self.has_errors = False
        for basedir in basedirs:
            if locales:
                dirs = [os.path.join(basedir, locale, 'LC_MESSAGES') for locale in locales]
            else:
                dirs = [basedir]
            locations = []
            for ldir in dirs:
                for dirpath, dirnames, filenames in os.walk(ldir):
                    locations.extend((dirpath, f) for f in filenames if f.endswith('.po'))
            if locations:
                self.compile_messages(locations)

        if self.has_errors:
            raise CommandError('compilemessages generated one or more errors.')

    def compile_messages(self, locations):
        """
        Locations is a list of tuples: [(directory, file), ...]
        """
        # msgfmt is an external process, so threads give real parallelism.
        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = []
            for i, (dirpath, f) in enumerate(locations):
                po_path = Path(dirpath) / f
                mo_path = po_path.with_suffix('.mo')
                try:
                    # Skip files whose .mo is at least as new as the .po.
                    if mo_path.stat().st_mtime >= po_path.stat().st_mtime:
                        if self.verbosity > 0:
                            self.stdout.write(
                                'File “%s” is already compiled and up to date.'
                                % po_path
                            )
                        continue
                except FileNotFoundError:
                    # No .mo yet — compile it below.
                    pass
                if self.verbosity > 0:
                    self.stdout.write('processing file %s in %s' % (f, dirpath))

                if has_bom(po_path):
                    self.stderr.write(
                        'The %s file has a BOM (Byte Order Mark). Django only '
                        'supports .po files encoded in UTF-8 and without any BOM.' % po_path
                    )
                    self.has_errors = True
                    continue

                # Check writability on first location
                if i == 0 and not is_writable(mo_path):
                    self.stderr.write(
                        'The po files under %s are in a seemingly not writable location. '
                        'mo files will not be updated/created.' % dirpath
                    )
                    self.has_errors = True
                    return

                args = [self.program, *self.program_options, '-o', mo_path, po_path]
                futures.append(executor.submit(popen_wrapper, args))

            # Collect results as the subprocesses finish; any non-zero exit
            # status marks the whole run as failed.
            for future in concurrent.futures.as_completed(futures):
                output, errors, status = future.result()
                if status:
                    if self.verbosity > 0:
                        if errors:
                            self.stderr.write("Execution of %s failed: %s" % (self.program, errors))
                        else:
                            self.stderr.write("Execution of %s failed" % self.program)
                    self.has_errors = True
|
@@ -0,0 +1,107 @@
|
||||
from django.conf import settings
|
||||
from django.core.cache import caches
|
||||
from django.core.cache.backends.db import BaseDatabaseCache
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import (
|
||||
DEFAULT_DB_ALIAS, DatabaseError, connections, models, router, transaction,
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = "Creates the tables needed to use the SQL cache backend."

    # Usable before checks can pass (e.g. on a fresh database).
    requires_system_checks = []

    def add_arguments(self, parser):
        parser.add_argument(
            'args', metavar='table_name', nargs='*',
            help='Optional table names. Otherwise, settings.CACHES is used to find cache tables.',
        )
        parser.add_argument(
            '--database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a database onto which the cache tables will be '
                 'installed. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--dry-run', action='store_true',
            help='Does not create the table, just prints the SQL that would be run.',
        )

    def handle(self, *tablenames, **options):
        """Create one cache table per explicit name, or one per database
        cache backend configured in settings.CACHES."""
        db = options['database']
        self.verbosity = options['verbosity']
        dry_run = options['dry_run']
        if tablenames:
            # Legacy behavior, tablename specified as argument
            for tablename in tablenames:
                self.create_table(db, tablename, dry_run)
        else:
            for cache_alias in settings.CACHES:
                cache = caches[cache_alias]
                if isinstance(cache, BaseDatabaseCache):
                    self.create_table(db, cache._table, dry_run)

    def create_table(self, database, tablename, dry_run):
        """Build and (unless dry_run) execute the CREATE TABLE / CREATE INDEX
        SQL for one cache table on the given database alias."""
        cache = BaseDatabaseCache(tablename, {})
        # Respect database routers: skip aliases the model can't migrate to.
        if not router.allow_migrate_model(database, cache.cache_model_class):
            return
        connection = connections[database]

        if tablename in connection.introspection.table_names():
            if self.verbosity > 0:
                self.stdout.write("Cache table '%s' already exists." % tablename)
            return

        fields = (
            # "key" is a reserved word in MySQL, so use "cache_key" instead.
            models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
            models.TextField(name='value'),
            models.DateTimeField(name='expires', db_index=True),
        )
        table_output = []
        index_output = []
        qn = connection.ops.quote_name
        for f in fields:
            # Render each column as "name type NULL-ness [constraint]".
            field_output = [
                qn(f.name),
                f.db_type(connection=connection),
                '%sNULL' % ('NOT ' if not f.null else ''),
            ]
            if f.primary_key:
                field_output.append("PRIMARY KEY")
            elif f.unique:
                field_output.append("UNIQUE")
            if f.db_index:
                unique = "UNIQUE " if f.unique else ""
                index_output.append(
                    "CREATE %sINDEX %s ON %s (%s);" %
                    (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename), qn(f.name))
                )
            table_output.append(" ".join(field_output))
        full_statement = ["CREATE TABLE %s (" % qn(tablename)]
        for i, line in enumerate(table_output):
            # Comma after every column definition except the last.
            full_statement.append('    %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
        full_statement.append(');')

        full_statement = "\n".join(full_statement)

        if dry_run:
            self.stdout.write(full_statement)
            for statement in index_output:
                self.stdout.write(statement)
            return

        # Wrap the DDL in a transaction where the backend supports rolling
        # DDL back; otherwise run without a savepoint.
        with transaction.atomic(using=database, savepoint=connection.features.can_rollback_ddl):
            with connection.cursor() as curs:
                try:
                    curs.execute(full_statement)
                except DatabaseError as e:
                    raise CommandError(
                        "Cache table '%s' could not be created.\nThe error was: %s." %
                        (tablename, e))
                for statement in index_output:
                    curs.execute(statement)

        if self.verbosity > 1:
            self.stdout.write("Cache table '%s' created." % tablename)
|
@@ -0,0 +1,43 @@
|
||||
import subprocess
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = (
        "Runs the command-line client for specified database, or the "
        "default database if none is provided."
    )

    # The shell must work even when the project's checks would fail.
    requires_system_checks = []

    def add_arguments(self, parser):
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database onto which to open a shell. Defaults to the "default" database.',
        )
        # Extra positional arguments forwarded verbatim to the client binary.
        parameters = parser.add_argument_group('parameters', prefix_chars='--')
        parameters.add_argument('parameters', nargs='*')

    def handle(self, **options):
        db_connection = connections[options['database']]
        try:
            db_connection.client.runshell(options['parameters'])
        except FileNotFoundError:
            # Note that we're assuming the FileNotFoundError relates to the
            # command missing. It could be raised for some other reason, in
            # which case this error message would be inaccurate. Still, this
            # message catches the common case.
            message = (
                'You appear not to have the %r program installed or on your path.' %
                db_connection.client.executable_name
            )
            raise CommandError(message)
        except subprocess.CalledProcessError as exc:
            message = '"%s" returned non-zero exit status %s.' % (
                ' '.join(exc.cmd),
                exc.returncode,
            )
            raise CommandError(message, returncode=exc.returncode)
|
@@ -0,0 +1,79 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
def module_to_dict(module, omittable=lambda k: k.startswith('_') or not k.isupper()):
    """Convert a module namespace to a Python dictionary.

    Names for which *omittable* returns True (by default anything
    underscore-prefixed or not fully upper-case) are dropped; values are
    rendered with repr().
    """
    result = {}
    for name in dir(module):
        if not omittable(name):
            result[name] = repr(getattr(module, name))
    return result
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings."""

    # Settings inspection must work even with a broken project.
    requires_system_checks = []

    def add_arguments(self, parser):
        parser.add_argument(
            '--all', action='store_true',
            help=(
                'Display all settings, regardless of their value. In "hash" '
                'mode, default values are prefixed by "###".'
            ),
        )
        parser.add_argument(
            '--default', metavar='MODULE',
            help=(
                "The settings module to compare the current settings against. Leave empty to "
                "compare against Django's default settings."
            ),
        )
        parser.add_argument(
            '--output', default='hash', choices=('hash', 'unified'),
            help=(
                "Selects the output format. 'hash' mode displays each changed "
                "setting, with the settings that don't appear in the defaults "
                "followed by ###. 'unified' mode prefixes the default setting "
                "with a minus sign, followed by the changed setting prefixed "
                "with a plus sign."
            ),
        )

    def handle(self, **options):
        """Render the user-vs-default settings diff in the chosen format."""
        from django.conf import Settings, global_settings, settings

        # Because settings are imported lazily, we need to explicitly load them.
        if not settings.configured:
            settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default = options['default']
        default_settings = module_to_dict(Settings(default) if default else global_settings)
        # Dispatch to the formatter matching --output.
        output_func = {
            'hash': self.output_hash,
            'unified': self.output_unified,
        }[options['output']]
        return '\n'.join(output_func(user_settings, default_settings, **options))

    def output_hash(self, user_settings, default_settings, **options):
        # Inspired by Postfix's "postconf -n".
        output = []
        for key in sorted(user_settings):
            if key not in default_settings:
                # Setting exists only in the user's configuration.
                output.append("%s = %s ###" % (key, user_settings[key]))
            elif user_settings[key] != default_settings[key]:
                # Setting overrides the default value.
                output.append("%s = %s" % (key, user_settings[key]))
            elif options['all']:
                # Unchanged default, shown only with --all.
                output.append("### %s = %s" % (key, user_settings[key]))
        return output

    def output_unified(self, user_settings, default_settings, **options):
        # Diff-like output: '-' lines show defaults, '+' lines user values.
        output = []
        for key in sorted(user_settings):
            if key not in default_settings:
                output.append(self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])))
            elif user_settings[key] != default_settings[key]:
                output.append(self.style.ERROR("- %s = %s" % (key, default_settings[key])))
                output.append(self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])))
            elif options['all']:
                output.append("  %s = %s" % (key, user_settings[key]))
        return output
|
@@ -0,0 +1,245 @@
|
||||
import gzip
|
||||
import os
|
||||
import warnings
|
||||
|
||||
from django.apps import apps
|
||||
from django.core import serializers
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.utils import parse_apps_and_model_labels
|
||||
from django.db import DEFAULT_DB_ALIAS, router
|
||||
|
||||
try:
|
||||
import bz2
|
||||
has_bz2 = True
|
||||
except ImportError:
|
||||
has_bz2 = False
|
||||
|
||||
try:
|
||||
import lzma
|
||||
has_lzma = True
|
||||
except ImportError:
|
||||
has_lzma = False
|
||||
|
||||
|
||||
class ProxyModelWarning(Warning):
    # Dedicated Warning subclass so callers can filter these warnings
    # separately from other warning categories.
    pass
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = (
|
||||
"Output the contents of the database as a fixture of the given format "
|
||||
"(using each model's default manager unless --all is specified)."
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'args', metavar='app_label[.ModelName]', nargs='*',
|
||||
help='Restricts dumped data to the specified app_label or app_label.ModelName.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--format', default='json',
|
||||
help='Specifies the output serialization format for fixtures.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--indent', type=int,
|
||||
help='Specifies the indent level to use when pretty-printing output.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--database',
|
||||
default=DEFAULT_DB_ALIAS,
|
||||
help='Nominates a specific database to dump fixtures from. '
|
||||
'Defaults to the "default" database.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-e', '--exclude', action='append', default=[],
|
||||
help='An app_label or app_label.ModelName to exclude '
|
||||
'(use multiple --exclude to exclude multiple apps/models).',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--natural-foreign', action='store_true', dest='use_natural_foreign_keys',
|
||||
help='Use natural foreign keys if they are available.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--natural-primary', action='store_true', dest='use_natural_primary_keys',
|
||||
help='Use natural primary keys if they are available.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-a', '--all', action='store_true', dest='use_base_manager',
|
||||
help="Use Django's base manager to dump all models stored in the database, "
|
||||
"including those that would otherwise be filtered or modified by a custom manager.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--pks', dest='primary_keys',
|
||||
help="Only dump objects with given primary keys. Accepts a comma-separated "
|
||||
"list of keys. This option only works when you specify one model.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-o', '--output',
|
||||
help='Specifies file to which the output is written.'
|
||||
)
|
||||
|
||||
def handle(self, *app_labels, **options):
    """
    Serialize the requested models to the chosen format.

    Collates the app/model selection from ``app_labels`` and the
    ``--exclude`` options, then streams the serialized objects either to
    stdout or to an (optionally compressed) output file.
    """
    format = options['format']
    indent = options['indent']
    using = options['database']
    excludes = options['exclude']
    output = options['output']
    show_traceback = options['traceback']
    use_natural_foreign_keys = options['use_natural_foreign_keys']
    use_natural_primary_keys = options['use_natural_primary_keys']
    use_base_manager = options['use_base_manager']
    pks = options['primary_keys']

    # --pks takes a comma-separated string; normalize to a list.
    if pks:
        primary_keys = [pk.strip() for pk in pks.split(',')]
    else:
        primary_keys = []

    excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)

    if not app_labels:
        # No labels given: dump every app that has models, minus exclusions.
        if primary_keys:
            raise CommandError("You can only use --pks option with one model")
        # dict preserves insertion order; values stay None meaning "all models".
        app_list = dict.fromkeys(
            app_config for app_config in apps.get_app_configs()
            if app_config.models_module is not None and app_config not in excluded_apps
        )
    else:
        if len(app_labels) > 1 and primary_keys:
            raise CommandError("You can only use --pks option with one model")
        app_list = {}
        for label in app_labels:
            try:
                # 'app.Model' form; ValueError from split() falls through to
                # the app-only branch below.
                app_label, model_label = label.split('.')
                try:
                    app_config = apps.get_app_config(app_label)
                except LookupError as e:
                    raise CommandError(str(e))
                if app_config.models_module is None or app_config in excluded_apps:
                    continue
                try:
                    model = app_config.get_model(model_label)
                except LookupError:
                    raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                app_list_value = app_list.setdefault(app_config, [])

                # We may have previously seen an "all-models" request for
                # this app (no model qualifier was given). In this case
                # there is no need adding specific models to the list.
                if app_list_value is not None and model not in app_list_value:
                    app_list_value.append(model)
            except ValueError:
                if primary_keys:
                    raise CommandError("You can only use --pks option with one model")
                # This is just an app - no model qualifier
                app_label = label
                try:
                    app_config = apps.get_app_config(app_label)
                except LookupError as e:
                    raise CommandError(str(e))
                if app_config.models_module is None or app_config in excluded_apps:
                    continue
                # None means "every model in this app".
                app_list[app_config] = None

    # Check that the serialization format exists; this is a shortcut to
    # avoid collating all the objects and _then_ failing.
    if format not in serializers.get_public_serializer_formats():
        try:
            serializers.get_serializer(format)
        except serializers.SerializerDoesNotExist:
            pass

        raise CommandError("Unknown serialization format: %s" % format)

    def get_objects(count_only=False):
        """
        Collate the objects to be serialized. If count_only is True, just
        count the number of objects to be serialized.
        """
        if use_natural_foreign_keys:
            # Dependency-sort so referenced objects appear before referrers.
            models = serializers.sort_dependencies(app_list.items(), allow_cycles=True)
        else:
            # There is no need to sort dependencies when natural foreign
            # keys are not used.
            models = []
            for (app_config, model_list) in app_list.items():
                if model_list is None:
                    models.extend(app_config.get_models())
                else:
                    models.extend(model_list)
        for model in models:
            if model in excluded_models:
                continue
            # Proxy models have no rows of their own; warn unless the
            # concrete model is also being dumped.
            if model._meta.proxy and model._meta.proxy_for_model not in models:
                warnings.warn(
                    "%s is a proxy model and won't be serialized." % model._meta.label,
                    category=ProxyModelWarning,
                )
            if not model._meta.proxy and router.allow_migrate_model(using, model):
                if use_base_manager:
                    objects = model._base_manager
                else:
                    objects = model._default_manager

                # Deterministic ordering by pk keeps output stable.
                queryset = objects.using(using).order_by(model._meta.pk.name)
                if primary_keys:
                    queryset = queryset.filter(pk__in=primary_keys)
                if count_only:
                    # order_by() clears the ordering so COUNT can be cheap.
                    yield queryset.order_by().count()
                else:
                    yield from queryset.iterator()

    try:
        self.stdout.ending = None
        progress_output = None
        object_count = 0
        # If dumpdata is outputting to stdout, there is no way to display progress
        if output and self.stdout.isatty() and options['verbosity'] > 0:
            progress_output = self.stdout
            object_count = sum(get_objects(count_only=True))
        if output:
            file_root, file_ext = os.path.splitext(output)
            # Map extension -> (opener, opener kwargs, actual path). Entries
            # pointing at file_root (extension dropped) are the fallback when
            # the matching compression module failed to import.
            compression_formats = {
                '.bz2': (open, {}, file_root),
                '.gz': (gzip.open, {}, output),
                '.lzma': (open, {}, file_root),
                '.xz': (open, {}, file_root),
                '.zip': (open, {}, file_root),
            }
            if has_bz2:
                compression_formats['.bz2'] = (bz2.open, {}, output)
            if has_lzma:
                compression_formats['.lzma'] = (
                    lzma.open, {'format': lzma.FORMAT_ALONE}, output
                )
                compression_formats['.xz'] = (lzma.open, {}, output)
            try:
                open_method, kwargs, file_path = compression_formats[file_ext]
            except KeyError:
                # Unknown extension: write uncompressed to the given path.
                open_method, kwargs, file_path = (open, {}, output)
            if file_path != output:
                # The requested compression isn't available; tell the user
                # where the plain file actually went.
                file_name = os.path.basename(file_path)
                warnings.warn(
                    f"Unsupported file extension ({file_ext}). "
                    f"Fixtures saved in '{file_name}'.",
                    RuntimeWarning,
                )
            stream = open_method(file_path, 'wt', **kwargs)
        else:
            stream = None
        try:
            serializers.serialize(
                format, get_objects(), indent=indent,
                use_natural_foreign_keys=use_natural_foreign_keys,
                use_natural_primary_keys=use_natural_primary_keys,
                stream=stream or self.stdout, progress_output=progress_output,
                object_count=object_count,
            )
        finally:
            if stream:
                stream.close()
    except Exception as e:
        if show_traceback:
            raise
        raise CommandError("Unable to serialize database: %s" % e)
|
@@ -0,0 +1,82 @@
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import apps
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.core.management.sql import emit_post_migrate_signal, sql_flush
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = (
        'Removes ALL DATA from the database, including data added during '
        'migrations. Does not achieve a "fresh install" state.'
    )
    # Options accepted via **options but deliberately not exposed on the
    # command-line parser (used by Django internals, e.g. the test runner).
    stealth_options = ('reset_sequences', 'allow_cascade', 'inhibit_post_migrate')

    def add_arguments(self, parser):
        """Register the command-line options for flush."""
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to flush. Defaults to the "default" database.',
        )

    def handle(self, **options):
        """
        Truncate every table on the selected database (after confirmation),
        then re-emit the post_migrate signal so apps can re-create their
        initial data.
        """
        database = options['database']
        connection = connections[database]
        verbosity = options['verbosity']
        interactive = options['interactive']
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                # Apps without a management module are fine; skip them.
                pass

        sql_list = sql_flush(self.style, connection,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the "%s" database,
and return each table to an empty state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            # --noinput implies consent.
            confirm = 'yes'

        if confirm == 'yes':
            try:
                connection.ops.execute_sql_flush(sql_list)
            except Exception as exc:
                raise CommandError(
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run." % (
                        connection.settings_dict['NAME'],
                    )
                ) from exc

            # Empty sql_list may signify an empty database and post_migrate would then crash
            if sql_list and not inhibit_post_migrate:
                # Emit the post migrate signal. This allows individual applications to
                # respond as if the database had been migrated from scratch.
                emit_post_migrate_signal(verbosity, interactive, database)
        else:
            self.stdout.write('Flush cancelled.')
|
@@ -0,0 +1,299 @@
|
||||
import keyword
|
||||
import re
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
from django.db.models.constants import LOOKUP_SEP
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = "Introspects the database tables in the given database and outputs a Django model module."
    requires_system_checks = []
    # 'table_name_filter' is accepted via **options but not exposed on the
    # command-line parser.
    stealth_options = ('table_name_filter',)
    # Module path emitted in the generated import line; overridable in
    # subclasses.
    db_module = 'django.db'

    def add_arguments(self, parser):
        """Register the command-line options for inspectdb."""
        parser.add_argument(
            'table', nargs='*', type=str,
            help='Selects what tables or views should be introspected.',
        )
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to introspect. Defaults to using the "default" database.',
        )
        parser.add_argument(
            '--include-partitions', action='store_true', help='Also output models for partition tables.',
        )
        parser.add_argument(
            '--include-views', action='store_true', help='Also output models for database views.',
        )

    def handle(self, **options):
        """Write the generated model module to stdout, line by line."""
        try:
            for line in self.handle_inspection(options):
                self.stdout.write(line)
        except NotImplementedError:
            raise CommandError("Database inspection isn't supported for the currently selected database backend.")

    def handle_inspection(self, options):
        """
        Generator yielding the lines of a Django models module that mirrors
        the introspected database schema.
        """
        connection = connections[options['database']]
        # 'table_name_filter' is a stealth option
        table_name_filter = options.get('table_name_filter')

        def table2model(table_name):
            # Strip non-alphanumerics and title-case to get a class name.
            return re.sub(r'[^a-zA-Z0-9]', '', table_name.title())

        with connection.cursor() as cursor:
            yield "# This is an auto-generated Django model module."
            yield "# You'll have to do the following manually to clean this up:"
            yield "#   * Rearrange models' order"
            yield "#   * Make sure each model has one field with primary_key=True"
            yield "#   * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior"
            yield (
                "#   * Remove `managed = False` lines if you wish to allow "
                "Django to create, modify, and delete the table"
            )
            yield "# Feel free to rename the models, but don't rename db_table values or field names."
            yield 'from %s import models' % self.db_module
            known_models = []
            table_info = connection.introspection.get_table_list(cursor)

            # Determine types of tables and/or views to be introspected.
            types = {'t'}
            if options['include_partitions']:
                types.add('p')
            if options['include_views']:
                types.add('v')

            for table_name in (options['table'] or sorted(info.name for info in table_info if info.type in types)):
                if table_name_filter is not None and callable(table_name_filter):
                    if not table_name_filter(table_name):
                        continue
                try:
                    try:
                        relations = connection.introspection.get_relations(cursor, table_name)
                    except NotImplementedError:
                        relations = {}
                    try:
                        constraints = connection.introspection.get_constraints(cursor, table_name)
                    except NotImplementedError:
                        constraints = {}
                    primary_key_column = connection.introspection.get_primary_key_column(cursor, table_name)
                    # Single-column unique constraints become unique=True on
                    # the field; multi-column ones are handled in get_meta().
                    unique_columns = [
                        c['columns'][0] for c in constraints.values()
                        if c['unique'] and len(c['columns']) == 1
                    ]
                    table_description = connection.introspection.get_table_description(cursor, table_name)
                except Exception as e:
                    # Emit the failure as comments and keep going with the
                    # remaining tables.
                    yield "# Unable to inspect table '%s'" % table_name
                    yield "# The error was: %s" % e
                    continue

                yield ''
                yield ''
                yield 'class %s(models.Model):' % table2model(table_name)
                known_models.append(table2model(table_name))
                used_column_names = []  # Holds column names used in the table so far
                column_to_field_name = {}  # Maps column names to names of model fields
                for row in table_description:
                    comment_notes = []  # Holds Field notes, to be displayed in a Python comment.
                    extra_params = {}  # Holds Field parameters such as 'db_column'.
                    column_name = row.name
                    is_relation = column_name in relations

                    att_name, params, notes = self.normalize_col_name(
                        column_name, used_column_names, is_relation)
                    extra_params.update(params)
                    comment_notes.extend(notes)

                    used_column_names.append(att_name)
                    column_to_field_name[column_name] = att_name

                    # Add primary_key and unique, if necessary.
                    if column_name == primary_key_column:
                        extra_params['primary_key'] = True
                    elif column_name in unique_columns:
                        extra_params['unique'] = True

                    if is_relation:
                        # A unique FK is modelled as a OneToOneField.
                        if extra_params.pop('unique', False) or extra_params.get('primary_key'):
                            rel_type = 'OneToOneField'
                        else:
                            rel_type = 'ForeignKey'
                        rel_to = (
                            "self" if relations[column_name][1] == table_name
                            else table2model(relations[column_name][1])
                        )
                        if rel_to in known_models:
                            field_type = '%s(%s' % (rel_type, rel_to)
                        else:
                            # Forward reference: quote the model name.
                            field_type = "%s('%s'" % (rel_type, rel_to)
                    else:
                        # Calling `get_field_type` to get the field type string and any
                        # additional parameters and notes.
                        field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
                        extra_params.update(field_params)
                        comment_notes.extend(field_notes)

                        field_type += '('

                    # Don't output 'id = meta.AutoField(primary_key=True)', because
                    # that's assumed if it doesn't exist.
                    if att_name == 'id' and extra_params == {'primary_key': True}:
                        if field_type == 'AutoField(':
                            continue
                        elif field_type == connection.features.introspected_field_types['AutoField'] + '(':
                            comment_notes.append('AutoField?')

                    # Add 'null' and 'blank', if the 'null_ok' flag was present in the
                    # table description.
                    if row.null_ok:  # If it's NULL...
                        extra_params['blank'] = True
                        extra_params['null'] = True

                    field_desc = '%s = %s%s' % (
                        att_name,
                        # Custom fields will have a dotted path
                        '' if '.' in field_type else 'models.',
                        field_type,
                    )
                    if field_type.startswith(('ForeignKey(', 'OneToOneField(')):
                        # on_delete is mandatory; DO_NOTHING is the safe stub.
                        field_desc += ', models.DO_NOTHING'

                    if extra_params:
                        if not field_desc.endswith('('):
                            field_desc += ', '
                        field_desc += ', '.join('%s=%r' % (k, v) for k, v in extra_params.items())
                    field_desc += ')'
                    if comment_notes:
                        field_desc += '  # ' + ' '.join(comment_notes)
                    yield '    %s' % field_desc
                is_view = any(info.name == table_name and info.type == 'v' for info in table_info)
                is_partition = any(info.name == table_name and info.type == 'p' for info in table_info)
                yield from self.get_meta(table_name, constraints, column_to_field_name, is_view, is_partition)

    def normalize_col_name(self, col_name, used_column_names, is_relation):
        """
        Modify the column name to make it Python-compatible as a field name.

        Returns (new_name, field_params, field_notes) where field_params may
        carry a 'db_column' override and field_notes lists human-readable
        explanations for each rename.
        """
        field_params = {}
        field_notes = []

        new_name = col_name.lower()
        if new_name != col_name:
            field_notes.append('Field name made lowercase.')

        if is_relation:
            if new_name.endswith('_id'):
                # Django appends '_id' to FK attnames; drop it here.
                new_name = new_name[:-3]
            else:
                field_params['db_column'] = col_name

        new_name, num_repl = re.subn(r'\W', '_', new_name)
        if num_repl > 0:
            field_notes.append('Field renamed to remove unsuitable characters.')

        # '__' is reserved for Django's lookup syntax.
        if new_name.find(LOOKUP_SEP) >= 0:
            while new_name.find(LOOKUP_SEP) >= 0:
                new_name = new_name.replace(LOOKUP_SEP, '_')
            if col_name.lower().find(LOOKUP_SEP) >= 0:
                # Only add the comment if the double underscore was in the original name
                field_notes.append("Field renamed because it contained more than one '_' in a row.")

        if new_name.startswith('_'):
            new_name = 'field%s' % new_name
            field_notes.append("Field renamed because it started with '_'.")

        if new_name.endswith('_'):
            new_name = '%sfield' % new_name
            field_notes.append("Field renamed because it ended with '_'.")

        if keyword.iskeyword(new_name):
            new_name += '_field'
            field_notes.append('Field renamed because it was a Python reserved word.')

        if new_name[0].isdigit():
            new_name = 'number_%s' % new_name
            field_notes.append("Field renamed because it wasn't a valid Python identifier.")

        if new_name in used_column_names:
            # Append a numeric suffix until the name is unique in this model.
            num = 0
            while '%s_%d' % (new_name, num) in used_column_names:
                num += 1
            new_name = '%s_%d' % (new_name, num)
            field_notes.append('Field renamed because of name conflict.')

        if col_name != new_name and field_notes:
            field_params['db_column'] = col_name

        return new_name, field_params, field_notes

    def get_field_type(self, connection, table_name, row):
        """
        Given the database connection, the table name, and the cursor row
        description, this routine will return the given field type name, as
        well as any additional keyword parameters and notes for the field.
        """
        field_params = {}
        field_notes = []

        try:
            field_type = connection.introspection.get_field_type(row.type_code, row)
        except KeyError:
            # Unknown DB type: TextField is the most permissive fallback.
            field_type = 'TextField'
            field_notes.append('This field type is a guess.')

        # Add max_length for all CharFields.
        if field_type == 'CharField' and row.internal_size:
            field_params['max_length'] = int(row.internal_size)

        if field_type in {'CharField', 'TextField'} and row.collation:
            field_params['db_collation'] = row.collation

        if field_type == 'DecimalField':
            if row.precision is None or row.scale is None:
                field_notes.append(
                    'max_digits and decimal_places have been guessed, as this '
                    'database handles decimal fields as float')
                field_params['max_digits'] = row.precision if row.precision is not None else 10
                field_params['decimal_places'] = row.scale if row.scale is not None else 5
            else:
                field_params['max_digits'] = row.precision
                field_params['decimal_places'] = row.scale

        return field_type, field_params, field_notes

    def get_meta(self, table_name, constraints, column_to_field_name, is_view, is_partition):
        """
        Return a sequence comprising the lines of code necessary
        to construct the inner Meta class for the model corresponding
        to the given database table name.
        """
        unique_together = []
        has_unsupported_constraint = False
        for params in constraints.values():
            if params['unique']:
                columns = params['columns']
                if None in columns:
                    # Constraint over an expression (not plain columns);
                    # can't be expressed as unique_together.
                    has_unsupported_constraint = True
                columns = [x for x in columns if x is not None]
                if len(columns) > 1:
                    unique_together.append(str(tuple(column_to_field_name[c] for c in columns)))
        if is_view:
            managed_comment = "  # Created from a view. Don't remove."
        elif is_partition:
            managed_comment = "  # Created from a partition. Don't remove."
        else:
            managed_comment = ''
        meta = ['']
        if has_unsupported_constraint:
            meta.append('    # A unique constraint could not be introspected.')
        meta += [
            '    class Meta:',
            '        managed = False%s' % managed_comment,
            '        db_table = %r' % table_name
        ]
        if unique_together:
            tup = '(' + ', '.join(unique_together) + ',)'
            meta += ["        unique_together = %s" % tup]
        return meta
|
@@ -0,0 +1,384 @@
|
||||
import functools
|
||||
import glob
|
||||
import gzip
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import zipfile
|
||||
from itertools import product
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core import serializers
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.core.management.utils import parse_apps_and_model_labels
|
||||
from django.db import (
|
||||
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router,
|
||||
transaction,
|
||||
)
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
try:
|
||||
import bz2
|
||||
has_bz2 = True
|
||||
except ImportError:
|
||||
has_bz2 = False
|
||||
|
||||
try:
|
||||
import lzma
|
||||
has_lzma = True
|
||||
except ImportError:
|
||||
has_lzma = False
|
||||
|
||||
READ_STDIN = '-'
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Installs the named fixture(s) in the database.'
|
||||
missing_args_message = (
|
||||
"No database fixture specified. Please provide the path of at least "
|
||||
"one fixture in the command line."
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture labels.')
|
||||
parser.add_argument(
|
||||
'--database', default=DEFAULT_DB_ALIAS,
|
||||
help='Nominates a specific database to load fixtures into. Defaults to the "default" database.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--app', dest='app_label',
|
||||
help='Only look for fixtures in the specified app.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--ignorenonexistent', '-i', action='store_true', dest='ignore',
|
||||
help='Ignores entries in the serialized data for fields that do not '
|
||||
'currently exist on the model.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-e', '--exclude', action='append', default=[],
|
||||
help='An app_label or app_label.ModelName to exclude. Can be used multiple times.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--format',
|
||||
help='Format of serialized data when reading from stdin.',
|
||||
)
|
||||
|
||||
def handle(self, *fixture_labels, **options):
    """
    Load the given fixture labels inside a single transaction on the
    selected database.
    """
    self.ignore = options['ignore']
    self.using = options['database']
    self.app_label = options['app_label']
    self.verbosity = options['verbosity']
    self.excluded_models, self.excluded_apps = parse_apps_and_model_labels(options['exclude'])
    self.format = options['format']

    # All fixtures load atomically: a failure rolls everything back.
    with transaction.atomic(using=self.using):
        self.loaddata(fixture_labels)

    # Close the DB connection -- unless we're still in a transaction. This
    # is required as a workaround for an edge case in MySQL: if the same
    # connection is used to create tables, load data, and query, the query
    # can return incorrect results. See Django #7572, MySQL #37735.
    if transaction.get_autocommit(self.using):
        connections[self.using].close()
|
||||
|
||||
@cached_property
def compression_formats(self):
    """Map each compression-format name to an (open callable, mode) pair."""
    # Forcing binary mode may be revisited after dropping Python 2 support (see #22399)
    formats = {
        None: (open, 'rb'),
        'gz': (gzip.GzipFile, 'rb'),
        'zip': (SingleZipReader, 'r'),
        'stdin': (lambda *args: sys.stdin, None),
    }
    # Optional codecs are registered only if their modules imported cleanly.
    if has_bz2:
        formats['bz2'] = (bz2.BZ2File, 'r')
    if has_lzma:
        formats.update({
            'lzma': (lzma.LZMAFile, 'r'),
            'xz': (lzma.LZMAFile, 'r'),
        })
    return formats
|
||||
|
||||
def reset_sequences(self, connection, models):
    """Reset the database sequences backing *models* on *connection*."""
    statements = connection.ops.sequence_reset_sql(no_style(), models)
    # Backends without sequences return no SQL; nothing to do then.
    if not statements:
        return
    if self.verbosity >= 2:
        self.stdout.write('Resetting sequences')
    with connection.cursor() as cursor:
        for statement in statements:
            cursor.execute(statement)
|
||||
|
||||
def loaddata(self, fixture_labels):
    """
    Locate and install every fixture named in ``fixture_labels``, with
    constraint checks disabled, then verify constraints and reset
    sequences.
    """
    connection = connections[self.using]

    # Keep a count of the installed objects and fixtures
    self.fixture_count = 0
    self.loaded_object_count = 0
    self.fixture_object_count = 0
    self.models = set()

    self.serialization_formats = serializers.get_public_serializer_formats()

    # Django's test suite repeatedly tries to load initial_data fixtures
    # from apps that don't have any fixtures. Because disabling constraint
    # checks can be expensive on some database (especially MSSQL), bail
    # out early if no fixtures are found.
    for fixture_label in fixture_labels:
        if self.find_fixtures(fixture_label):
            break
    else:
        return

    self.objs_with_deferred_fields = []
    with connection.constraint_checks_disabled():
        for fixture_label in fixture_labels:
            self.load_label(fixture_label)
        # Forward references were deferred during the load; resolve them
        # now that all objects exist.
        for obj in self.objs_with_deferred_fields:
            obj.save_deferred_fields(using=self.using)

    # Since we disabled constraint checks, we must manually check for
    # any invalid keys that might have been added
    table_names = [model._meta.db_table for model in self.models]
    try:
        connection.check_constraints(table_names=table_names)
    except Exception as e:
        # Prefix the error with context about what was being done.
        e.args = ("Problem installing fixtures: %s" % e,)
        raise

    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if self.loaded_object_count > 0:
        self.reset_sequences(connection, self.models)

    if self.verbosity >= 1:
        if self.fixture_object_count == self.loaded_object_count:
            self.stdout.write(
                "Installed %d object(s) from %d fixture(s)"
                % (self.loaded_object_count, self.fixture_count)
            )
        else:
            # Some objects were skipped (routing or exclusions); show both
            # counts.
            self.stdout.write(
                "Installed %d object(s) (of %d) from %d fixture(s)"
                % (self.loaded_object_count, self.fixture_object_count, self.fixture_count)
            )
|
||||
|
||||
def save_obj(self, obj):
    """
    Save a deserialized object if permitted.

    Returns True if the object was saved, False if it was skipped because
    its app/model is excluded or the router disallows migration to this
    database.
    """
    if (
        obj.object._meta.app_config in self.excluded_apps or
        type(obj.object) in self.excluded_models
    ):
        return False
    saved = False
    if router.allow_migrate_model(self.using, obj.object.__class__):
        saved = True
        # Track the model so constraints/sequences can be handled later.
        self.models.add(obj.object.__class__)
        try:
            obj.save(using=self.using)
        # psycopg2 raises ValueError if data contains NUL chars.
        except (DatabaseError, IntegrityError, ValueError) as e:
            # Re-raise with the failing object identified in the message.
            e.args = ('Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s' % {
                'object_label': obj.object._meta.label,
                'pk': obj.object.pk,
                'error_msg': e,
            },)
            raise
    if obj.deferred_fields:
        # Forward references get resolved after all fixtures are loaded.
        self.objs_with_deferred_fields.append(obj)
    return saved
|
||||
|
||||
def load_label(self, fixture_label):
    """Load fixtures files for a given label."""
    show_progress = self.verbosity >= 3
    for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
        _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
        open_method, mode = self.compression_formats[cmp_fmt]
        fixture = open_method(fixture_file, mode)
        self.fixture_count += 1
        objects_in_fixture = 0
        loaded_objects_in_fixture = 0
        if self.verbosity >= 2:
            self.stdout.write(
                "Installing %s fixture '%s' from %s."
                % (ser_fmt, fixture_name, humanize(fixture_dir))
            )
        try:
            objects = serializers.deserialize(
                ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,
                handle_forward_references=True,
            )

            for obj in objects:
                objects_in_fixture += 1
                if self.save_obj(obj):
                    loaded_objects_in_fixture += 1
                    if show_progress:
                        # '\r' with ending='' rewrites the same console line.
                        self.stdout.write(
                            '\rProcessed %i object(s).' % loaded_objects_in_fixture,
                            ending=''
                        )
        except Exception as e:
            # Wrap non-command errors with the fixture file for context;
            # CommandErrors already carry a user-facing message.
            if not isinstance(e, CommandError):
                e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
            raise
        finally:
            fixture.close()

        if objects_in_fixture and show_progress:
            self.stdout.write()  # Add a newline after progress indicator.
        self.loaded_object_count += loaded_objects_in_fixture
        self.fixture_object_count += objects_in_fixture
        # Warn if the fixture we loaded contains 0 objects.
        if objects_in_fixture == 0:
            warnings.warn(
                "No fixture data found for '%s'. (File format may be "
                "invalid.)" % fixture_name,
                RuntimeWarning
            )
|
||||
|
||||
def get_fixture_name_and_dirs(self, fixture_name):
    """Split *fixture_name* into its basename and candidate search dirs."""
    dir_part, base_part = os.path.split(fixture_name)
    if os.path.isabs(fixture_name):
        # An absolute label names its directory directly.
        search_dirs = [dir_part]
    else:
        search_dirs = self.fixture_dirs
        if os.path.sep in os.path.normpath(fixture_name):
            # A relative sub-path is looked up inside every fixture dir.
            search_dirs = [os.path.join(d, dir_part) for d in search_dirs]
    return base_part, search_dirs
|
||||
|
||||
def get_targets(self, fixture_name, ser_fmt, cmp_fmt):
    """Return the set of file names that could satisfy this fixture label."""
    databases = [self.using, None]
    # Unspecified formats match every known format.
    cmp_fmts = [cmp_fmt] if cmp_fmt is not None else self.compression_formats
    ser_fmts = [ser_fmt] if ser_fmt is not None else self.serialization_formats
    targets = set()
    for combo in product(databases, ser_fmts, cmp_fmts):
        suffix = '.'.join(ext for ext in combo if ext)
        targets.add('%s.%s' % (fixture_name, suffix))
    return targets
|
||||
|
||||
def find_fixture_files_in_dir(self, fixture_dir, fixture_name, targets):
    """List (path, dir, name) triples for matching fixtures in one dir."""
    prefix = os.path.join(fixture_dir, fixture_name)
    # Save the fixture_dir and fixture_name alongside each hit for future
    # error messages.
    return [
        (candidate, fixture_dir, fixture_name)
        for candidate in glob.iglob(glob.escape(prefix) + '*')
        if os.path.basename(candidate) in targets
    ]
|
||||
|
||||
# NOTE(review): lru_cache on an instance method keys on `self` and keeps the
# command instance alive for the cache's lifetime; acceptable for a
# short-lived management command, but worth confirming if reused elsewhere.
@functools.lru_cache(maxsize=None)
def find_fixtures(self, fixture_label):
    """Find fixture files for a given label."""
    # '-' means read serialized data from stdin instead of a file.
    if fixture_label == READ_STDIN:
        return [(READ_STDIN, None, READ_STDIN)]

    fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
    if self.verbosity >= 2:
        self.stdout.write("Loading '%s' fixtures..." % fixture_name)

    fixture_name, fixture_dirs = self.get_fixture_name_and_dirs(fixture_name)
    targets = self.get_targets(fixture_name, ser_fmt, cmp_fmt)
    fixture_files = []
    for fixture_dir in fixture_dirs:
        if self.verbosity >= 2:
            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
        fixture_files_in_dir = self.find_fixture_files_in_dir(
            fixture_dir, fixture_name, targets,
        )
        if self.verbosity >= 2 and not fixture_files_in_dir:
            self.stdout.write("No fixture '%s' in %s." %
                              (fixture_name, humanize(fixture_dir)))

        # Check kept for backwards-compatibility; it isn't clear why
        # duplicates are only allowed in different directories.
        if len(fixture_files_in_dir) > 1:
            raise CommandError(
                "Multiple fixtures named '%s' in %s. Aborting." %
                (fixture_name, humanize(fixture_dir)))
        fixture_files.extend(fixture_files_in_dir)

    if not fixture_files:
        raise CommandError("No fixture named '%s' found." % fixture_name)

    return fixture_files
|
||||
|
||||
    @cached_property
    def fixture_dirs(self):
        """
        Return a list of fixture directories.

        The list contains the 'fixtures' subdirectory of each installed
        application, if it exists, the directories in FIXTURE_DIRS, and the
        current directory.
        """
        dirs = []
        fixture_dirs = settings.FIXTURE_DIRS
        if len(fixture_dirs) != len(set(fixture_dirs)):
            raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
        for app_config in apps.get_app_configs():
            app_label = app_config.label
            app_dir = os.path.join(app_config.path, 'fixtures')
            # Reject overlap between app defaults and FIXTURE_DIRS even for
            # apps filtered out below, so misconfiguration is always caught.
            if app_dir in fixture_dirs:
                raise ImproperlyConfigured(
                    "'%s' is a default fixture directory for the '%s' app "
                    "and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label)
                )

            # When the command targets a single app, skip the others.
            if self.app_label and app_label != self.app_label:
                continue
            if os.path.isdir(app_dir):
                dirs.append(app_dir)
        dirs.extend(fixture_dirs)
        # '' makes realpath() below resolve to the current directory.
        dirs.append('')
        return [os.path.realpath(d) for d in dirs]
|
||||
|
||||
def parse_name(self, fixture_name):
|
||||
"""
|
||||
Split fixture name in name, serialization format, compression format.
|
||||
"""
|
||||
if fixture_name == READ_STDIN:
|
||||
if not self.format:
|
||||
raise CommandError('--format must be specified when reading from stdin.')
|
||||
return READ_STDIN, self.format, 'stdin'
|
||||
|
||||
parts = fixture_name.rsplit('.', 2)
|
||||
|
||||
if len(parts) > 1 and parts[-1] in self.compression_formats:
|
||||
cmp_fmt = parts[-1]
|
||||
parts = parts[:-1]
|
||||
else:
|
||||
cmp_fmt = None
|
||||
|
||||
if len(parts) > 1:
|
||||
if parts[-1] in self.serialization_formats:
|
||||
ser_fmt = parts[-1]
|
||||
parts = parts[:-1]
|
||||
else:
|
||||
raise CommandError(
|
||||
"Problem installing fixture '%s': %s is not a known "
|
||||
"serialization format." % ('.'.join(parts[:-1]), parts[-1]))
|
||||
else:
|
||||
ser_fmt = None
|
||||
|
||||
name = '.'.join(parts)
|
||||
|
||||
return name, ser_fmt, cmp_fmt
|
||||
|
||||
|
||||
class SingleZipReader(zipfile.ZipFile):
    """A zip archive wrapper that only accepts single-member archives."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Fixtures must be exactly one file; anything else is ambiguous.
        if len(self.namelist()) != 1:
            raise ValueError("Zip-compressed fixtures must contain one file.")

    def read(self):
        # Return the raw bytes of the archive's only member.
        names = self.namelist()
        return zipfile.ZipFile.read(self, names[0])
|
||||
|
||||
|
||||
def humanize(dirname):
    """Quote a directory name for messages; label the empty name instead."""
    if dirname:
        return "'%s'" % dirname
    return 'absolute path'
|
@@ -0,0 +1,675 @@
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from functools import total_ordering
|
||||
from itertools import dropwhile
|
||||
|
||||
import django
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.files.temp import NamedTemporaryFile
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.utils import (
|
||||
find_command, handle_extensions, is_ignored_path, popen_wrapper,
|
||||
)
|
||||
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.jslex import prepare_js_for_gettext
|
||||
from django.utils.regex_helper import _lazy_re_compile
|
||||
from django.utils.text import get_text_list
|
||||
from django.utils.translation import templatize
|
||||
|
||||
# Matches the quoted "Plural-Forms" header line of a PO/POT catalog so it
# can be copied between catalogs (see Command.copy_plural_forms()).
plural_forms_re = _lazy_re_compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
# Exit status reported by gettext tools on success (checked via popen_wrapper).
STATUS_OK = 0
# Sentinel locale_dir for translatable files with no locale directory to
# store translations in; triggers an error if messages are found for them.
NO_LOCALE_DIR = object()
|
||||
|
||||
|
||||
def check_programs(*programs):
    """Raise CommandError unless every named GNU gettext tool is on PATH."""
    for binary in programs:
        if find_command(binary) is not None:
            continue
        raise CommandError(
            "Can't find %s. Make sure you have GNU gettext tools 0.15 or "
            "newer installed." % binary
        )
|
||||
|
||||
|
||||
@total_ordering
class TranslatableFile:
    """A source file discovered for translation; ordered by its full path."""

    def __init__(self, dirpath, file_name, locale_dir):
        self.dirpath = dirpath
        self.file = file_name
        self.locale_dir = locale_dir

    @property
    def path(self):
        return os.path.join(self.dirpath, self.file)

    # Equality and ordering both key on the joined path; total_ordering
    # derives the remaining comparison operators.
    def __eq__(self, other):
        return self.path == other.path

    def __lt__(self, other):
        return self.path < other.path

    def __repr__(self):
        return "<%s: %s>" % (
            self.__class__.__name__,
            os.sep.join([self.dirpath, self.file]),
        )
|
||||
|
||||
|
||||
class BuildFile:
    """
    Represent the state of a translatable file during the build process.
    """
    def __init__(self, command, domain, translatable):
        self.command = command          # the makemessages Command instance
        self.domain = domain            # 'django' or 'djangojs'
        self.translatable = translatable  # a TranslatableFile

    @cached_property
    def is_templatized(self):
        # JS files only need a C-style preprocessing pass for old gettext
        # versions; Django templates (any non-.py file) always need one.
        if self.domain == 'djangojs':
            return self.command.gettext_version < (0, 18, 3)
        elif self.domain == 'django':
            file_ext = os.path.splitext(self.translatable.file)[1]
            return file_ext != '.py'
        return False

    @cached_property
    def path(self):
        return self.translatable.path

    @cached_property
    def work_path(self):
        """
        Path to a file which is being fed into GNU gettext pipeline. This may
        be either a translatable or its preprocessed version.
        """
        if not self.is_templatized:
            return self.path
        extension = {
            'djangojs': 'c',
            'django': 'py',
        }.get(self.domain)
        filename = '%s.%s' % (self.translatable.file, extension)
        return os.path.join(self.translatable.dirpath, filename)

    def preprocess(self):
        """
        Preprocess (if necessary) a translatable file before passing it to
        xgettext GNU gettext utility.
        """
        if not self.is_templatized:
            return

        with open(self.path, encoding='utf-8') as fp:
            src_data = fp.read()

        if self.domain == 'djangojs':
            content = prepare_js_for_gettext(src_data)
        elif self.domain == 'django':
            # [2:] strips the leading './' from paths produced by find_files('.')
            content = templatize(src_data, origin=self.path[2:])

        with open(self.work_path, 'w', encoding='utf-8') as fp:
            fp.write(content)

    def postprocess_messages(self, msgs):
        """
        Postprocess messages generated by xgettext GNU gettext utility.

        Transform paths as if these messages were generated from original
        translatable files rather than from preprocessed versions.
        """
        if not self.is_templatized:
            return msgs

        # Remove '.py' suffix
        if os.name == 'nt':
            # Preserve '.\' prefix on Windows to respect gettext behavior
            old_path = self.work_path
            new_path = self.path
        else:
            old_path = self.work_path[2:]
            new_path = self.path[2:]

        # Rewrite '#: path' comment lines in the catalog in place.
        return re.sub(
            r'^(#: .*)(' + re.escape(old_path) + r')',
            lambda match: match[0].replace(old_path, new_path),
            msgs,
            flags=re.MULTILINE
        )

    def cleanup(self):
        """
        Remove a preprocessed copy of a translatable file (if any).
        """
        if self.is_templatized:
            # This check is needed for the case of a symlinked file and its
            # source being processed inside a single group (locale dir);
            # removing either of those two removes both.
            if os.path.exists(self.work_path):
                os.unlink(self.work_path)
|
||||
|
||||
|
||||
def normalize_eols(raw_contents):
    """
    Take a block of raw text that will be passed through str.splitlines() to
    get universal newlines treatment.

    Return the resulting block of text with normalized `\n` EOL sequences ready
    to be written to disk using current platform's native EOLs.
    """
    lines = raw_contents.splitlines()
    # A non-empty final line must be terminated with its own EOL.
    needs_trailing_eol = bool(lines) and bool(lines[-1])
    normalized = '\n'.join(lines)
    return normalized + '\n' if needs_trailing_eol else normalized
|
||||
|
||||
|
||||
def write_pot_file(potfile, msgs):
    """
    Write the `potfile` with the `msgs` contents, making sure its format is
    valid.
    """
    pot_lines = msgs.splitlines()
    if os.path.exists(potfile):
        # Appending to an existing catalog: drop the duplicate header, i.e.
        # everything up to the first blank line.
        lines = dropwhile(len, pot_lines)
    else:
        # Fresh catalog: fix the charset placeholder within the header.
        lines = []
        charset_fixed = False
        header_done = False
        for line in pot_lines:
            if not charset_fixed and not header_done:
                if 'charset=CHARSET' in line:
                    charset_fixed = True
                    line = line.replace('charset=CHARSET', 'charset=UTF-8')
            if not line and not charset_fixed:
                header_done = True
            lines.append(line)
    msgs = '\n'.join(lines)
    # Force newlines of POT files to '\n' to work around
    # https://savannah.gnu.org/bugs/index.php?52395
    with open(potfile, 'a', encoding='utf-8', newline='\n') as fp:
        fp.write(msgs)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """makemessages: extract translatable strings into .pot/.po catalogs."""
    help = (
        "Runs over the entire source tree of the current directory and "
        "pulls out all strings marked for translation. It creates (or updates) a message "
        "file in the conf/locale (in the django tree) or locale (for projects and "
        "applications) directory.\n\nYou must run this command with one of either the "
        "--locale, --exclude, or --all options."
    )

    # Hook points so subclasses can substitute their own file wrappers.
    translatable_file_class = TranslatableFile
    build_file_class = BuildFile

    requires_system_checks = []

    # Default options passed to the respective gettext tools; extended
    # per-invocation in handle() (always on copies, never in place).
    msgmerge_options = ['-q', '--previous']
    msguniq_options = ['--to-code=utf-8']
    msgattrib_options = ['--no-obsolete']
    xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']

    def add_arguments(self, parser):
        parser.add_argument(
            '--locale', '-l', default=[], action='append',
            help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
                 'Can be used multiple times.',
        )
        parser.add_argument(
            '--exclude', '-x', default=[], action='append',
            help='Locales to exclude. Default is none. Can be used multiple times.',
        )
        parser.add_argument(
            '--domain', '-d', default='django',
            help='The domain of the message files (default: "django").',
        )
        parser.add_argument(
            '--all', '-a', action='store_true',
            help='Updates the message files for all existing locales.',
        )
        parser.add_argument(
            '--extension', '-e', dest='extensions', action='append',
            help='The file extension(s) to examine (default: "html,txt,py", or "js" '
                 'if the domain is "djangojs"). Separate multiple extensions with '
                 'commas, or use -e multiple times.',
        )
        parser.add_argument(
            '--symlinks', '-s', action='store_true',
            help='Follows symlinks to directories when examining source code '
                 'and templates for translation strings.',
        )
        parser.add_argument(
            '--ignore', '-i', action='append', dest='ignore_patterns',
            default=[], metavar='PATTERN',
            help='Ignore files or directories matching this glob-style pattern. '
                 'Use multiple times to ignore more.',
        )
        parser.add_argument(
            '--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
            help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.",
        )
        parser.add_argument(
            '--no-wrap', action='store_true',
            help="Don't break long message lines into several lines.",
        )
        parser.add_argument(
            '--no-location', action='store_true',
            help="Don't write '#: filename:line' lines.",
        )
        parser.add_argument(
            '--add-location',
            choices=('full', 'file', 'never'), const='full', nargs='?',
            help=(
                "Controls '#: filename:line' lines. If the option is 'full' "
                "(the default if not given), the lines include both file name "
                "and line number. If it's 'file', the line number is omitted. If "
                "it's 'never', the lines are suppressed (same as --no-location). "
                "--add-location requires gettext 0.19 or newer."
            ),
        )
        parser.add_argument(
            '--no-obsolete', action='store_true',
            help="Remove obsolete message strings.",
        )
        parser.add_argument(
            '--keep-pot', action='store_true',
            help="Keep .pot file after making messages. Useful when debugging.",
        )

    def handle(self, *args, **options):
        locale = options['locale']
        exclude = options['exclude']
        self.domain = options['domain']
        self.verbosity = options['verbosity']
        process_all = options['all']
        extensions = options['extensions']
        self.symlinks = options['symlinks']

        ignore_patterns = options['ignore_patterns']
        if options['use_default_ignore_patterns']:
            ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
        self.ignore_patterns = list(set(ignore_patterns))

        # Avoid messing with mutable class variables
        if options['no_wrap']:
            self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
            self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
            self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
            self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
        if options['no_location']:
            self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
            self.msguniq_options = self.msguniq_options[:] + ['--no-location']
            self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
            self.xgettext_options = self.xgettext_options[:] + ['--no-location']
        if options['add_location']:
            if self.gettext_version < (0, 19):
                raise CommandError(
                    "The --add-location option requires gettext 0.19 or later. "
                    "You have %s." % '.'.join(str(x) for x in self.gettext_version)
                )
            arg_add_location = "--add-location=%s" % options['add_location']
            self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location]
            self.msguniq_options = self.msguniq_options[:] + [arg_add_location]
            self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location]
            self.xgettext_options = self.xgettext_options[:] + [arg_add_location]

        self.no_obsolete = options['no_obsolete']
        self.keep_pot = options['keep_pot']

        if self.domain not in ('django', 'djangojs'):
            raise CommandError("currently makemessages only supports domains "
                               "'django' and 'djangojs'")
        if self.domain == 'djangojs':
            exts = extensions or ['js']
        else:
            exts = extensions or ['html', 'txt', 'py']
        self.extensions = handle_extensions(exts)

        if (not locale and not exclude and not process_all) or self.domain is None:
            raise CommandError(
                "Type '%s help %s' for usage information."
                % (os.path.basename(sys.argv[0]), sys.argv[1])
            )

        if self.verbosity > 1:
            self.stdout.write(
                'examining files with the extensions: %s'
                % get_text_list(list(self.extensions), 'and')
            )

        self.invoked_for_django = False
        self.locale_paths = []
        self.default_locale_path = None
        # Running from within the Django source tree itself is detected by
        # the presence of conf/locale in the current directory.
        if os.path.isdir(os.path.join('conf', 'locale')):
            self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
            self.default_locale_path = self.locale_paths[0]
            self.invoked_for_django = True
        else:
            if self.settings_available:
                self.locale_paths.extend(settings.LOCALE_PATHS)
            # Allow to run makemessages inside an app dir
            if os.path.isdir('locale'):
                self.locale_paths.append(os.path.abspath('locale'))
            if self.locale_paths:
                self.default_locale_path = self.locale_paths[0]
                os.makedirs(self.default_locale_path, exist_ok=True)

        # Build locale list
        looks_like_locale = re.compile(r'[a-z]{2}')
        locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
        all_locales = [
            lang_code for lang_code in map(os.path.basename, locale_dirs)
            if looks_like_locale.match(lang_code)
        ]

        # Account for excluded locales
        if process_all:
            locales = all_locales
        else:
            locales = locale or all_locales
        locales = set(locales).difference(exclude)

        # The full tool chain is only needed when .po files will be written.
        if locales:
            check_programs('msguniq', 'msgmerge', 'msgattrib')

        check_programs('xgettext')

        try:
            potfiles = self.build_potfiles()

            # Build po files for each selected locale
            for locale in locales:
                if '-' in locale:
                    self.stdout.write(
                        'invalid locale %s, did you mean %s?' % (
                            locale,
                            locale.replace('-', '_'),
                        ),
                    )
                    continue
                if self.verbosity > 0:
                    self.stdout.write('processing locale %s' % locale)
                for potfile in potfiles:
                    self.write_po_file(potfile, locale)
        finally:
            if not self.keep_pot:
                self.remove_potfiles()

    @cached_property
    def gettext_version(self):
        """Return the installed xgettext version as an int tuple, e.g. (0, 19)."""
        # Gettext tools will output system-encoded bytestrings instead of UTF-8,
        # when looking up the version. It's especially a problem on Windows.
        out, err, status = popen_wrapper(
            ['xgettext', '--version'],
            stdout_encoding=DEFAULT_LOCALE_ENCODING,
        )
        m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
        if m:
            return tuple(int(d) for d in m.groups() if d is not None)
        else:
            raise CommandError("Unable to get gettext version. Is it installed?")

    @cached_property
    def settings_available(self):
        """True if Django settings are configured (makemessages can run without)."""
        try:
            settings.LOCALE_PATHS
        except ImproperlyConfigured:
            if self.verbosity > 1:
                self.stderr.write("Running without configured settings.")
            return False
        return True

    def build_potfiles(self):
        """
        Build pot files and apply msguniq to them.
        """
        file_list = self.find_files(".")
        self.remove_potfiles()
        self.process_files(file_list)
        potfiles = []
        for path in self.locale_paths:
            potfile = os.path.join(path, '%s.pot' % self.domain)
            if not os.path.exists(potfile):
                continue
            args = ['msguniq'] + self.msguniq_options + [potfile]
            msgs, errors, status = popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msguniq\n%s" % errors)
                elif self.verbosity > 0:
                    # Non-fatal stderr output from msguniq is shown as warnings.
                    self.stdout.write(errors)
            msgs = normalize_eols(msgs)
            with open(potfile, 'w', encoding='utf-8') as fp:
                fp.write(msgs)
            potfiles.append(potfile)
        return potfiles

    def remove_potfiles(self):
        # Delete the per-locale-path .pot files for the current domain.
        for path in self.locale_paths:
            pot_path = os.path.join(path, '%s.pot' % self.domain)
            if os.path.exists(pot_path):
                os.unlink(pot_path)

    def find_files(self, root):
        """
        Get all files in the given root. Also check that there is a matching
        locale dir for each file.
        """
        all_files = []
        ignored_roots = []
        if self.settings_available:
            ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
        for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
            # Iterate over a copy so directories can be pruned in place.
            for dirname in dirnames[:]:
                if (is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), self.ignore_patterns) or
                        os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
                    dirnames.remove(dirname)
                    if self.verbosity > 1:
                        self.stdout.write('ignoring directory %s' % dirname)
                elif dirname == 'locale':
                    # locale dirs are recorded (with highest priority) rather
                    # than walked for translatable files.
                    dirnames.remove(dirname)
                    self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
            for filename in filenames:
                file_path = os.path.normpath(os.path.join(dirpath, filename))
                file_ext = os.path.splitext(filename)[1]
                if file_ext not in self.extensions or is_ignored_path(file_path, self.ignore_patterns):
                    if self.verbosity > 1:
                        self.stdout.write('ignoring file %s in %s' % (filename, dirpath))
                else:
                    # Associate the file with the first locale path that is a
                    # sibling/ancestor match, else the default, else a sentinel.
                    locale_dir = None
                    for path in self.locale_paths:
                        if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
                            locale_dir = path
                            break
                    locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR
                    all_files.append(self.translatable_file_class(dirpath, filename, locale_dir))
        return sorted(all_files)

    def process_files(self, file_list):
        """
        Group translatable files by locale directory and run pot file build
        process for each group.
        """
        file_groups = {}
        for translatable in file_list:
            file_group = file_groups.setdefault(translatable.locale_dir, [])
            file_group.append(translatable)
        for locale_dir, files in file_groups.items():
            self.process_locale_dir(locale_dir, files)

    def process_locale_dir(self, locale_dir, files):
        """
        Extract translatable literals from the specified files, creating or
        updating the POT file for a given locale directory.

        Use the xgettext GNU gettext utility.
        """
        build_files = []
        for translatable in files:
            if self.verbosity > 1:
                self.stdout.write('processing file %s in %s' % (
                    translatable.file, translatable.dirpath
                ))
            if self.domain not in ('djangojs', 'django'):
                continue
            build_file = self.build_file_class(self, self.domain, translatable)
            try:
                build_file.preprocess()
            except UnicodeDecodeError as e:
                self.stdout.write(
                    'UnicodeDecodeError: skipped file %s in %s (reason: %s)' % (
                        translatable.file, translatable.dirpath, e,
                    )
                )
                continue
            except BaseException:
                # Cleanup before exit.
                for build_file in build_files:
                    build_file.cleanup()
                raise
            build_files.append(build_file)

        if self.domain == 'djangojs':
            # NOTE(review): is_templatized here comes from the last build_file
            # of the loop above — assumes it is representative for the whole
            # group; confirm before refactoring.
            is_templatized = build_file.is_templatized
            args = [
                'xgettext',
                '-d', self.domain,
                '--language=%s' % ('C' if is_templatized else 'JavaScript',),
                '--keyword=gettext_noop',
                '--keyword=gettext_lazy',
                '--keyword=ngettext_lazy:1,2',
                '--keyword=pgettext:1c,2',
                '--keyword=npgettext:1c,2,3',
                '--output=-',
            ]
        elif self.domain == 'django':
            args = [
                'xgettext',
                '-d', self.domain,
                '--language=Python',
                '--keyword=gettext_noop',
                '--keyword=gettext_lazy',
                '--keyword=ngettext_lazy:1,2',
                '--keyword=pgettext:1c,2',
                '--keyword=npgettext:1c,2,3',
                '--keyword=pgettext_lazy:1c,2',
                '--keyword=npgettext_lazy:1c,2,3',
                '--output=-',
            ]
        else:
            return

        # Feed xgettext the file list via a temp file to avoid command-line
        # length limits.
        input_files = [bf.work_path for bf in build_files]
        with NamedTemporaryFile(mode='w+') as input_files_list:
            input_files_list.write('\n'.join(input_files))
            input_files_list.flush()
            args.extend(['--files-from', input_files_list.name])
            args.extend(self.xgettext_options)
            msgs, errors, status = popen_wrapper(args)

        if errors:
            if status != STATUS_OK:
                for build_file in build_files:
                    build_file.cleanup()
                raise CommandError(
                    'errors happened while running xgettext on %s\n%s' %
                    ('\n'.join(input_files), errors)
                )
            elif self.verbosity > 0:
                # Print warnings
                self.stdout.write(errors)

        if msgs:
            if locale_dir is NO_LOCALE_DIR:
                for build_file in build_files:
                    build_file.cleanup()
                file_path = os.path.normpath(build_files[0].path)
                raise CommandError(
                    "Unable to find a locale path to store translations for "
                    "file %s. Make sure the 'locale' directory exists in an "
                    "app or LOCALE_PATHS setting is set." % file_path
                )
            for build_file in build_files:
                msgs = build_file.postprocess_messages(msgs)
            potfile = os.path.join(locale_dir, '%s.pot' % self.domain)
            write_pot_file(potfile, msgs)

        for build_file in build_files:
            build_file.cleanup()

    def write_po_file(self, potfile, locale):
        """
        Create or update the PO file for self.domain and `locale`.
        Use contents of the existing `potfile`.

        Use msgmerge and msgattrib GNU gettext utilities.
        """
        basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
        os.makedirs(basedir, exist_ok=True)
        pofile = os.path.join(basedir, '%s.po' % self.domain)

        if os.path.exists(pofile):
            args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
            msgs, errors, status = popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msgmerge\n%s" % errors)
                elif self.verbosity > 0:
                    self.stdout.write(errors)
        else:
            with open(potfile, encoding='utf-8') as fp:
                msgs = fp.read()
            if not self.invoked_for_django:
                msgs = self.copy_plural_forms(msgs, locale)
        msgs = normalize_eols(msgs)
        # Strip the marker msgcat/msguniq leave in merged headers.
        msgs = msgs.replace(
            "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
        with open(pofile, 'w', encoding='utf-8') as fp:
            fp.write(msgs)

        if self.no_obsolete:
            args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
            msgs, errors, status = popen_wrapper(args)
            if errors:
                if status != STATUS_OK:
                    raise CommandError(
                        "errors happened while running msgattrib\n%s" % errors)
                elif self.verbosity > 0:
                    self.stdout.write(errors)

    def copy_plural_forms(self, msgs, locale):
        """
        Copy plural forms header contents from a Django catalog of locale to
        the msgs string, inserting it at the right place. msgs should be the
        contents of a newly created .po file.
        """
        django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
        if self.domain == 'djangojs':
            domains = ('djangojs', 'django')
        else:
            domains = ('django',)
        for domain in domains:
            django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
            if os.path.exists(django_po):
                with open(django_po, encoding='utf-8') as fp:
                    m = plural_forms_re.search(fp.read())
                if m:
                    plural_form_line = m['value']
                    if self.verbosity > 1:
                        self.stdout.write('copying plural forms: %s' % plural_form_line)
                    lines = []
                    found = False
                    for line in msgs.splitlines():
                        # Replace the first blank or Plural-Forms line with
                        # the copied header line.
                        if not found and (not line or plural_forms_re.search(line)):
                            line = plural_form_line
                            found = True
                        lines.append(line)
                    msgs = '\n'.join(lines)
                    break
        return msgs
|
@@ -0,0 +1,325 @@
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from itertools import takewhile
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core.management.base import (
|
||||
BaseCommand, CommandError, no_translations,
|
||||
)
|
||||
from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router
|
||||
from django.db.migrations import Migration
|
||||
from django.db.migrations.autodetector import MigrationAutodetector
|
||||
from django.db.migrations.loader import MigrationLoader
|
||||
from django.db.migrations.questioner import (
|
||||
InteractiveMigrationQuestioner, MigrationQuestioner,
|
||||
NonInteractiveMigrationQuestioner,
|
||||
)
|
||||
from django.db.migrations.state import ProjectState
|
||||
from django.db.migrations.utils import get_migration_name_timestamp
|
||||
from django.db.migrations.writer import MigrationWriter
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # The ``makemigrations`` management command: detect model changes and
    # serialize them into new migration files.
    help = "Creates new migration(s) for apps."

    def add_arguments(self, parser):
        """Register makemigrations' command-line options on *parser*."""
        parser.add_argument(
            'args', metavar='app_label', nargs='*',
            help='Specify the app label(s) to create migrations for.',
        )
        parser.add_argument(
            '--dry-run', action='store_true',
            help="Just show what migrations would be made; don't actually write them.",
        )
        parser.add_argument(
            '--merge', action='store_true',
            help="Enable fixing of migration conflicts.",
        )
        parser.add_argument(
            '--empty', action='store_true',
            help="Create an empty migration.",
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '-n', '--name',
            help="Use this name for migration file(s).",
        )
        parser.add_argument(
            '--no-header', action='store_false', dest='include_header',
            help='Do not add header comments to new migration file(s).',
        )
        parser.add_argument(
            '--check', action='store_true', dest='check_changes',
            help='Exit with a non-zero status if model changes are missing migrations.',
        )

    @no_translations
    def handle(self, *app_labels, **options):
        """
        Detect model changes and write new migration files.

        Validates the requested app labels, checks that the migration history
        recorded on the relevant database connection(s) is consistent,
        refuses to proceed (or diverts into handle_merge() with --merge) when
        conflicting leaf migrations exist, and finally runs the autodetector
        and writes the resulting migrations to disk.
        """
        self.verbosity = options['verbosity']
        self.interactive = options['interactive']
        self.dry_run = options['dry_run']
        self.merge = options['merge']
        self.empty = options['empty']
        self.migration_name = options['name']
        # Migration names become Python module names, so reject anything that
        # isn't a valid identifier up front.
        if self.migration_name and not self.migration_name.isidentifier():
            raise CommandError('The migration name must be a valid Python identifier.')
        self.include_header = options['include_header']
        check_changes = options['check_changes']

        # Make sure the app they asked for exists
        app_labels = set(app_labels)
        has_bad_labels = False
        for app_label in app_labels:
            try:
                apps.get_app_config(app_label)
            except LookupError as err:
                self.stderr.write(str(err))
                has_bad_labels = True
        if has_bad_labels:
            # Report every bad label before exiting, rather than failing on
            # the first one.
            sys.exit(2)

        # Load the current graph state. Pass in None for the connection so
        # the loader doesn't try to resolve replaced migrations from DB.
        loader = MigrationLoader(None, ignore_no_migrations=True)

        # Raise an error if any migrations are applied before their dependencies.
        consistency_check_labels = {config.label for config in apps.get_app_configs()}
        # Non-default databases are only checked if database routers used.
        aliases_to_check = connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS]
        for alias in sorted(aliases_to_check):
            connection = connections[alias]
            if (connection.settings_dict['ENGINE'] != 'django.db.backends.dummy' and any(
                # At least one model must be migrated to the database.
                router.allow_migrate(connection.alias, app_label, model_name=model._meta.object_name)
                for app_label in consistency_check_labels
                for model in apps.get_app_config(app_label).get_models()
            )):
                try:
                    loader.check_consistent_history(connection)
                except OperationalError as error:
                    # An unreachable database shouldn't block making
                    # migrations; downgrade to a warning.
                    warnings.warn(
                        "Got an error checking a consistent migration history "
                        "performed for database connection '%s': %s"
                        % (alias, error),
                        RuntimeWarning,
                    )
        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any and they don't want to merge
        conflicts = loader.detect_conflicts()

        # If app_labels is specified, filter out conflicting migrations for unspecified apps
        if app_labels:
            conflicts = {
                app_label: conflict for app_label, conflict in conflicts.items()
                if app_label in app_labels
            }

        if conflicts and not self.merge:
            name_str = "; ".join(
                "%s in %s" % (", ".join(names), app)
                for app, names in conflicts.items()
            )
            raise CommandError(
                "Conflicting migrations detected; multiple leaf nodes in the "
                "migration graph: (%s).\nTo fix them run "
                "'python manage.py makemigrations --merge'" % name_str
            )

        # If they want to merge and there's nothing to merge, then politely exit
        if self.merge and not conflicts:
            self.stdout.write("No conflicts detected to merge.")
            return

        # If they want to merge and there is something to merge, then
        # divert into the merge code
        if self.merge and conflicts:
            return self.handle_merge(loader, conflicts)

        # Choose a questioner: interactive prompts vs. hard-coded defaults.
        if self.interactive:
            questioner = InteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run)
        else:
            questioner = NonInteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run)
        # Set up autodetector
        autodetector = MigrationAutodetector(
            loader.project_state(),
            ProjectState.from_apps(apps),
            questioner,
        )

        # If they want to make an empty migration, make one for each app
        if self.empty:
            if not app_labels:
                raise CommandError("You must supply at least one app label when using --empty.")
            # Make a fake changes() result we can pass to arrange_for_graph
            changes = {
                app: [Migration("custom", app)]
                for app in app_labels
            }
            changes = autodetector.arrange_for_graph(
                changes=changes,
                graph=loader.graph,
                migration_name=self.migration_name,
            )
            self.write_migration_files(changes)
            return

        # Detect changes
        changes = autodetector.changes(
            graph=loader.graph,
            trim_to_apps=app_labels or None,
            convert_apps=app_labels or None,
            migration_name=self.migration_name,
        )

        if not changes:
            # No changes? Tell them.
            if self.verbosity >= 1:
                if app_labels:
                    if len(app_labels) == 1:
                        self.stdout.write("No changes detected in app '%s'" % app_labels.pop())
                    else:
                        self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels)))
                else:
                    self.stdout.write("No changes detected")
        else:
            self.write_migration_files(changes)
            # --check: signal "migrations are missing" via exit status.
            if check_changes:
                sys.exit(1)

    def write_migration_files(self, changes):
        """
        Take a changes dict and write them out as migration files.
        """
        directory_created = {}
        for app_label, app_migrations in changes.items():
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label))
            for migration in app_migrations:
                # Describe the migration
                writer = MigrationWriter(migration, self.include_header)
                if self.verbosity >= 1:
                    # Display a relative path if it's below the current working
                    # directory, or an absolute path otherwise.
                    try:
                        migration_string = os.path.relpath(writer.path)
                    except ValueError:
                        # relpath() can fail (e.g. paths on different Windows
                        # drives); fall back to the absolute path.
                        migration_string = writer.path
                    if migration_string.startswith('..'):
                        migration_string = writer.path
                    self.stdout.write('  %s\n' % self.style.MIGRATE_LABEL(migration_string))
                    for operation in migration.operations:
                        self.stdout.write('    - %s' % operation.describe())
                if not self.dry_run:
                    # Write the migrations file to the disk.
                    migrations_directory = os.path.dirname(writer.path)
                    if not directory_created.get(app_label):
                        os.makedirs(migrations_directory, exist_ok=True)
                        init_path = os.path.join(migrations_directory, "__init__.py")
                        if not os.path.isfile(init_path):
                            open(init_path, "w").close()
                        # We just do this once per app
                        directory_created[app_label] = True
                    migration_string = writer.as_string()
                    with open(writer.path, "w", encoding='utf-8') as fh:
                        fh.write(migration_string)
                elif self.verbosity == 3:
                    # Alternatively, makemigrations --dry-run --verbosity 3
                    # will output the migrations to stdout rather than saving
                    # the file to the disk.
                    self.stdout.write(self.style.MIGRATE_HEADING(
                        "Full migrations file '%s':" % writer.filename
                    ))
                    self.stdout.write(writer.as_string())

    def handle_merge(self, loader, conflicts):
        """
        Handles merging together conflicted migrations interactively,
        if it's safe; otherwise, advises on how to fix it.

        For each conflicted app, finds the conflicting leaves' common
        ancestor in the graph, shows the operations on each divergent
        branch, and (if confirmed) writes an empty merge migration that
        depends on all of the conflicting leaves.
        """
        if self.interactive:
            questioner = InteractiveMigrationQuestioner()
        else:
            # Non-interactive runs auto-approve the merge.
            questioner = MigrationQuestioner(defaults={'ask_merge': True})

        for app_label, migration_names in conflicts.items():
            # Grab out the migrations in question, and work out their
            # common ancestor.
            merge_migrations = []
            for migration_name in migration_names:
                migration = loader.get_migration(app_label, migration_name)
                # Ancestry: this app's portion of the forwards plan up to
                # (and including) the conflicting leaf.
                migration.ancestry = [
                    mig for mig in loader.graph.forwards_plan((app_label, migration_name))
                    if mig[0] == migration.app_label
                ]
                merge_migrations.append(migration)

            def all_items_equal(seq):
                return all(item == seq[0] for item in seq[1:])

            # Walk the ancestries in lockstep; generations agree until the
            # branches diverge.
            merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations))
            common_ancestor_count = sum(1 for common_ancestor_generation
                                        in takewhile(all_items_equal, merge_migrations_generations))
            if not common_ancestor_count:
                raise ValueError("Could not find common ancestor of %s" % migration_names)
            # Now work out the operations along each divergent branch
            for migration in merge_migrations:
                migration.branch = migration.ancestry[common_ancestor_count:]
                migrations_ops = (loader.get_migration(node_app, node_name).operations
                                  for node_app, node_name in migration.branch)
                migration.merged_operations = sum(migrations_ops, [])
            # In future, this could use some of the Optimizer code
            # (can_optimize_through) to automatically see if they're
            # mergeable. For now, we always just prompt the user.
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
                for migration in merge_migrations:
                    self.stdout.write(self.style.MIGRATE_LABEL("  Branch %s" % migration.name))
                    for operation in migration.merged_operations:
                        self.stdout.write('    - %s' % operation.describe())
            if questioner.ask_merge(app_label):
                # If they still want to merge it, then write out an empty
                # file depending on the migrations needing merging.
                numbers = [
                    MigrationAutodetector.parse_number(migration.name)
                    for migration in merge_migrations
                ]
                try:
                    biggest_number = max(x for x in numbers if x is not None)
                except ValueError:
                    # No leaf had a numeric prefix; start numbering at 1.
                    biggest_number = 1
                subclass = type("Migration", (Migration,), {
                    "dependencies": [(app_label, migration.name) for migration in merge_migrations],
                })
                parts = ['%04i' % (biggest_number + 1)]
                if self.migration_name:
                    parts.append(self.migration_name)
                else:
                    parts.append('merge')
                    leaf_names = '_'.join(sorted(migration.name for migration in merge_migrations))
                    # Keep generated module names to a sane length; fall back
                    # to a timestamp when the joined leaf names are too long.
                    if len(leaf_names) > 47:
                        parts.append(get_migration_name_timestamp())
                    else:
                        parts.append(leaf_names)
                migration_name = '_'.join(parts)
                new_migration = subclass(migration_name, app_label)
                writer = MigrationWriter(new_migration, self.include_header)

                if not self.dry_run:
                    # Write the merge migrations file to the disk
                    with open(writer.path, "w", encoding='utf-8') as fh:
                        fh.write(writer.as_string())
                    if self.verbosity > 0:
                        self.stdout.write("\nCreated new merge migration %s" % writer.path)
                elif self.verbosity == 3:
                    # Alternatively, makemigrations --merge --dry-run --verbosity 3
                    # will output the merge migrations to stdout rather than saving
                    # the file to the disk.
                    self.stdout.write(self.style.MIGRATE_HEADING(
                        "Full merge migrations file '%s':" % writer.filename
                    ))
                    self.stdout.write(writer.as_string())
|
@@ -0,0 +1,386 @@
|
||||
import sys
|
||||
import time
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import apps
|
||||
from django.core.management.base import (
|
||||
BaseCommand, CommandError, no_translations,
|
||||
)
|
||||
from django.core.management.sql import (
|
||||
emit_post_migrate_signal, emit_pre_migrate_signal,
|
||||
)
|
||||
from django.db import DEFAULT_DB_ALIAS, connections, router
|
||||
from django.db.migrations.autodetector import MigrationAutodetector
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.db.migrations.loader import AmbiguityError
|
||||
from django.db.migrations.state import ModelState, ProjectState
|
||||
from django.utils.module_loading import module_has_submodule
|
||||
from django.utils.text import Truncator
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # The ``migrate`` management command: bring the database schema in line
    # with the project's migrations (and sync apps without migrations).
    help = "Updates database schema. Manages both apps with migrations and those without."
    # System checks are run manually in handle() (honoring --skip-checks and
    # scoping them to the selected database) rather than automatically.
    requires_system_checks = []

    def add_arguments(self, parser):
        """Register migrate's command-line options on *parser*."""
        parser.add_argument(
            '--skip-checks', action='store_true',
            help='Skip system checks.',
        )
        parser.add_argument(
            'app_label', nargs='?',
            help='App label of an application to synchronize the state.',
        )
        parser.add_argument(
            'migration_name', nargs='?',
            help='Database state will be brought to the state after that '
                 'migration. Use the name "zero" to unapply all migrations.',
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a database to synchronize. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--fake', action='store_true',
            help='Mark migrations as run without actually running them.',
        )
        parser.add_argument(
            '--fake-initial', action='store_true',
            help='Detect if tables already exist and fake-apply initial migrations if so. Make sure '
                 'that the current database schema matches your initial migration before using this '
                 'flag. Django will only check for an existing table name.',
        )
        parser.add_argument(
            '--plan', action='store_true',
            help='Shows a list of the migration actions that will be performed.',
        )
        parser.add_argument(
            '--run-syncdb', action='store_true',
            help='Creates tables for apps without migrations.',
        )
        parser.add_argument(
            '--check', action='store_true', dest='check_unapplied',
            help='Exits with a non-zero status if unapplied migrations exist.',
        )

    @no_translations
    def handle(self, *args, **options):
        """
        Apply (or unapply) migrations on the selected database.

        Works out the target migration set from the positional arguments,
        builds a plan via the MigrationExecutor, emits the pre_migrate
        signal, optionally syncs apps without migrations, runs the plan,
        then re-renders model state and emits post_migrate.
        """
        database = options['database']
        if not options['skip_checks']:
            self.check(databases=[database])

        self.verbosity = options['verbosity']
        self.interactive = options['interactive']

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module('.management', app_config.name)

        # Get the database we're operating from
        connection = connections[database]

        # Hook for backends needing any database preparation
        connection.prepare_database()
        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection, self.migration_progress_callback)

        # Raise an error if any migrations are applied before their dependencies.
        executor.loader.check_consistent_history(connection)

        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join(
                "%s in %s" % (", ".join(names), app)
                for app, names in conflicts.items()
            )
            raise CommandError(
                "Conflicting migrations detected; multiple leaf nodes in the "
                "migration graph: (%s).\nTo fix them run "
                "'python manage.py makemigrations --merge'" % name_str
            )

        # If they supplied command line arguments, work out what they mean.
        run_syncdb = options['run_syncdb']
        target_app_labels_only = True
        if options['app_label']:
            # Validate app_label.
            app_label = options['app_label']
            try:
                apps.get_app_config(app_label)
            except LookupError as err:
                raise CommandError(str(err))
            if run_syncdb:
                if app_label in executor.loader.migrated_apps:
                    raise CommandError("Can't use run_syncdb with app '%s' as it has migrations." % app_label)
            elif app_label not in executor.loader.migrated_apps:
                raise CommandError("App '%s' does not have migrations." % app_label)

        if options['app_label'] and options['migration_name']:
            migration_name = options['migration_name']
            if migration_name == "zero":
                # (app_label, None) targets the state before any migration,
                # i.e. unapply everything for this app.
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
                except AmbiguityError:
                    raise CommandError(
                        "More than one migration matches '%s' in app '%s'. "
                        "Please be more specific." %
                        (migration_name, app_label)
                    )
                except KeyError:
                    raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
                        migration_name, app_label))
                target = (app_label, migration.name)
                # Partially applied squashed migrations are not included in the
                # graph, use the last replacement instead.
                if (
                    target not in executor.loader.graph.nodes and
                    target in executor.loader.replacements
                ):
                    incomplete_migration = executor.loader.replacements[target]
                    target = incomplete_migration.replaces[-1]
                targets = [target]
            target_app_labels_only = False
        elif options['app_label']:
            targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
        else:
            targets = executor.loader.graph.leaf_nodes()

        plan = executor.migration_plan(targets)
        # --check: a non-empty plan means unapplied migrations exist.
        exit_dry = plan and options['check_unapplied']

        if options['plan']:
            self.stdout.write('Planned operations:', self.style.MIGRATE_LABEL)
            if not plan:
                self.stdout.write('  No planned migration operations.')
            for migration, backwards in plan:
                self.stdout.write(str(migration), self.style.MIGRATE_HEADING)
                for operation in migration.operations:
                    message, is_error = self.describe_operation(operation, backwards)
                    style = self.style.WARNING if is_error else None
                    self.stdout.write('    ' + message, style)
            if exit_dry:
                sys.exit(1)
            return
        if exit_dry:
            sys.exit(1)

        # At this point, ignore run_syncdb if there aren't any apps to sync.
        run_syncdb = options['run_syncdb'] and executor.loader.unmigrated_apps
        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb:
                if options['app_label']:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL("  Synchronize unmigrated app: %s" % app_label)
                    )
                else:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ") +
                        (", ".join(sorted(executor.loader.unmigrated_apps)))
                    )
            if target_app_labels_only:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Apply all migrations: ") +
                    (", ".join(sorted({a for a, n in targets})) or "(none)")
                )
            else:
                if targets[0][1] is None:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL('  Unapply all migrations: ') +
                        str(targets[0][0])
                    )
                else:
                    self.stdout.write(self.style.MIGRATE_LABEL(
                        "  Target specific migration: ") + "%s, from %s"
                        % (targets[0][1], targets[0][0])
                    )

        pre_migrate_state = executor._create_project_state(with_applied_migrations=True)
        pre_migrate_apps = pre_migrate_state.apps
        emit_pre_migrate_signal(
            self.verbosity, self.interactive, connection.alias, stdout=self.stdout, apps=pre_migrate_apps, plan=plan,
        )

        # Run the syncdb phase.
        if run_syncdb:
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
            if options['app_label']:
                self.sync_apps(connection, [app_label])
            else:
                self.sync_apps(connection, executor.loader.unmigrated_apps)

        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            if self.verbosity >= 1:
                self.stdout.write("  No migrations to apply.")
                # If there's changes that aren't in migrations yet, tell them how to fix it.
                autodetector = MigrationAutodetector(
                    executor.loader.project_state(),
                    ProjectState.from_apps(apps),
                )
                changes = autodetector.changes(graph=executor.loader.graph)
                if changes:
                    self.stdout.write(self.style.NOTICE(
                        "  Your models in app(s): %s have changes that are not "
                        "yet reflected in a migration, and so won't be "
                        "applied." % ", ".join(repr(app) for app in sorted(changes))
                    ))
                    self.stdout.write(self.style.NOTICE(
                        "  Run 'manage.py makemigrations' to make new "
                        "migrations, and then re-run 'manage.py migrate' to "
                        "apply them."
                    ))
            fake = False
            fake_initial = False
        else:
            fake = options['fake']
            fake_initial = options['fake_initial']
        post_migrate_state = executor.migrate(
            targets, plan=plan, state=pre_migrate_state.clone(), fake=fake,
            fake_initial=fake_initial,
        )
        # post_migrate signals have access to all models. Ensure that all models
        # are reloaded in case any are delayed.
        post_migrate_state.clear_delayed_apps_cache()
        post_migrate_apps = post_migrate_state.apps

        # Re-render models of real apps to include relationships now that
        # we've got a final state. This wouldn't be necessary if real apps
        # models were rendered with relationships in the first place.
        with post_migrate_apps.bulk_update():
            model_keys = []
            for model_state in post_migrate_apps.real_models:
                model_key = model_state.app_label, model_state.name_lower
                model_keys.append(model_key)
                post_migrate_apps.unregister_model(*model_key)
        post_migrate_apps.render_multiple([
            ModelState.from_model(apps.get_model(*model)) for model in model_keys
        ])

        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(
            self.verbosity, self.interactive, connection.alias, stdout=self.stdout, apps=post_migrate_apps, plan=plan,
        )

    def migration_progress_callback(self, action, migration=None, fake=False):
        """
        Progress hook passed to MigrationExecutor: print per-migration
        status lines ("Applying... OK/FAKED", etc.), with timings at
        verbosity > 1.
        """
        if self.verbosity >= 1:
            compute_time = self.verbosity > 1
            if action == "apply_start":
                if compute_time:
                    self.start = time.monotonic()
                # ending="" so the OK/FAKED suffix lands on the same line.
                self.stdout.write("  Applying %s..." % migration, ending="")
                self.stdout.flush()
            elif action == "apply_success":
                elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
                if fake:
                    self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
                else:
                    self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
            elif action == "unapply_start":
                if compute_time:
                    self.start = time.monotonic()
                self.stdout.write("  Unapplying %s..." % migration, ending="")
                self.stdout.flush()
            elif action == "unapply_success":
                elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
                if fake:
                    self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
                else:
                    self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
            elif action == "render_start":
                if compute_time:
                    self.start = time.monotonic()
                self.stdout.write("  Rendering model states...", ending="")
                self.stdout.flush()
            elif action == "render_success":
                elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
                self.stdout.write(self.style.SUCCESS(" DONE" + elapsed))

    def sync_apps(self, connection, app_labels):
        """Run the old syncdb-style operation on a list of app_labels."""
        with connection.cursor() as cursor:
            tables = connection.introspection.table_names(cursor)

        # Build the manifest of apps and models that are to be synchronized.
        all_models = [
            (
                app_config.label,
                router.get_migratable_models(app_config, connection.alias, include_auto_created=False),
            )
            for app_config in apps.get_app_configs()
            if app_config.models_module is not None and app_config.label in app_labels
        ]

        def model_installed(model):
            # True when neither the model's table nor its auto-created (M2M
            # through) table already exists in the database.
            opts = model._meta
            converter = connection.introspection.identifier_converter
            return not (
                (converter(opts.db_table) in tables) or
                (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)
            )

        manifest = {
            app_name: list(filter(model_installed, model_list))
            for app_name, model_list in all_models
        }

        # Create the tables for each model
        if self.verbosity >= 1:
            self.stdout.write('  Creating tables...')
        with connection.schema_editor() as editor:
            for app_name, model_list in manifest.items():
                for model in model_list:
                    # Never install unmanaged models, etc.
                    if not model._meta.can_migrate(connection):
                        continue
                    if self.verbosity >= 3:
                        self.stdout.write(
                            '    Processing %s.%s model' % (app_name, model._meta.object_name)
                        )
                    if self.verbosity >= 1:
                        self.stdout.write('    Creating table %s' % model._meta.db_table)
                    editor.create_model(model)

            # Deferred SQL is executed when exiting the editor's context.
            if self.verbosity >= 1:
                self.stdout.write('    Running deferred SQL...')

    @staticmethod
    def describe_operation(operation, backwards):
        """
        Return a string that describes a migration operation for --plan.

        Returns a (message, is_error) pair; is_error is True when the
        operation cannot be reversed (no reverse code/SQL) and *backwards*
        was requested.
        """
        prefix = ''
        is_error = False
        if hasattr(operation, 'code'):
            # RunPython-style: describe via the (reverse) callable's docstring.
            code = operation.reverse_code if backwards else operation.code
            action = (code.__doc__ or '') if code else None
        elif hasattr(operation, 'sql'):
            # RunSQL-style: describe via the (reverse) SQL itself.
            action = operation.reverse_sql if backwards else operation.sql
        else:
            action = ''
            if backwards:
                prefix = 'Undo '
        if action is not None:
            action = str(action).replace('\n', '')
        elif backwards:
            # action is None => no reverse handler exists for this operation.
            action = 'IRREVERSIBLE'
            is_error = True
        if action:
            action = ' -> ' + action
        truncated = Truncator(action)
        return prefix + operation.describe() + truncated.chars(40), is_error
|
@@ -0,0 +1,164 @@
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.servers.basehttp import (
|
||||
WSGIServer, get_internal_wsgi_application, run,
|
||||
)
|
||||
from django.utils import autoreload
|
||||
from django.utils.regex_helper import _lazy_re_compile
|
||||
|
||||
naiveip_re = _lazy_re_compile(r"""^(?:
|
||||
(?P<addr>
|
||||
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
|
||||
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
|
||||
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
|
||||
):)?(?P<port>\d+)$""", re.X)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Starts a lightweight web server for development."
|
||||
|
||||
# Validation is called explicitly each time the server is reloaded.
|
||||
requires_system_checks = []
|
||||
stealth_options = ('shutdown_message',)
|
||||
suppressed_base_arguments = {'--verbosity', '--traceback'}
|
||||
|
||||
default_addr = '127.0.0.1'
|
||||
default_addr_ipv6 = '::1'
|
||||
default_port = '8000'
|
||||
protocol = 'http'
|
||||
server_cls = WSGIServer
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'addrport', nargs='?',
|
||||
help='Optional port number, or ipaddr:port'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--ipv6', '-6', action='store_true', dest='use_ipv6',
|
||||
help='Tells Django to use an IPv6 address.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--nothreading', action='store_false', dest='use_threading',
|
||||
help='Tells Django to NOT use threading.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--noreload', action='store_false', dest='use_reloader',
|
||||
help='Tells Django to NOT use the auto-reloader.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--skip-checks', action='store_true',
|
||||
help='Skip system checks.',
|
||||
)
|
||||
|
||||
def execute(self, *args, **options):
|
||||
if options['no_color']:
|
||||
# We rely on the environment because it's currently the only
|
||||
# way to reach WSGIRequestHandler. This seems an acceptable
|
||||
# compromise considering `runserver` runs indefinitely.
|
||||
os.environ["DJANGO_COLORS"] = "nocolor"
|
||||
super().execute(*args, **options)
|
||||
|
||||
def get_handler(self, *args, **options):
|
||||
"""Return the default WSGI handler for the runner."""
|
||||
return get_internal_wsgi_application()
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if not settings.DEBUG and not settings.ALLOWED_HOSTS:
|
||||
raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.')
|
||||
|
||||
self.use_ipv6 = options['use_ipv6']
|
||||
if self.use_ipv6 and not socket.has_ipv6:
|
||||
raise CommandError('Your Python does not support IPv6.')
|
||||
self._raw_ipv6 = False
|
||||
if not options['addrport']:
|
||||
self.addr = ''
|
||||
self.port = self.default_port
|
||||
else:
|
||||
m = re.match(naiveip_re, options['addrport'])
|
||||
if m is None:
|
||||
raise CommandError('"%s" is not a valid port number '
|
||||
'or address:port pair.' % options['addrport'])
|
||||
self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
|
||||
if not self.port.isdigit():
|
||||
raise CommandError("%r is not a valid port number." % self.port)
|
||||
if self.addr:
|
||||
if _ipv6:
|
||||
self.addr = self.addr[1:-1]
|
||||
self.use_ipv6 = True
|
||||
self._raw_ipv6 = True
|
||||
elif self.use_ipv6 and not _fqdn:
|
||||
raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
|
||||
if not self.addr:
|
||||
self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr
|
||||
self._raw_ipv6 = self.use_ipv6
|
||||
self.run(**options)
|
||||
|
||||
def run(self, **options):
|
||||
"""Run the server, using the autoreloader if needed."""
|
||||
use_reloader = options['use_reloader']
|
||||
|
||||
if use_reloader:
|
||||
autoreload.run_with_reloader(self.inner_run, **options)
|
||||
else:
|
||||
self.inner_run(None, **options)
|
||||
|
||||
def inner_run(self, *args, **options):
    """Perform checks, print the startup banner, and serve until interrupted."""
    # If an exception was silenced in ManagementUtility.execute in order
    # to be raised in the child process, raise it now.
    autoreload.raise_last_exception()

    threading = options['use_threading']
    # 'shutdown_message' is a stealth option.
    shutdown_message = options.get('shutdown_message', '')
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'

    if not options['skip_checks']:
        self.stdout.write('Performing system checks...\n\n')
        self.check(display_num_errors=True)
    # Need to check migrations here, so can't use the
    # requires_migrations_check attribute.
    self.check_migrations()
    now = datetime.now().strftime('%B %d, %Y - %X')
    self.stdout.write(now)
    self.stdout.write((
        "Django version %(version)s, using settings %(settings)r\n"
        "Starting development server at %(protocol)s://%(addr)s:%(port)s/\n"
        "Quit the server with %(quit_command)s."
    ) % {
        "version": self.get_version(),
        "settings": settings.SETTINGS_MODULE,
        "protocol": self.protocol,
        "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
        "port": self.port,
        "quit_command": quit_command,
    })

    try:
        handler = self.get_handler(*args, **options)
        run(
            self.addr, int(self.port), handler,
            ipv6=self.use_ipv6, threading=threading, server_cls=self.server_cls,
        )
    except OSError as e:
        # Use helpful error messages instead of ugly tracebacks.
        error_messages = {
            errno.EACCES: "You don't have permission to access that port.",
            errno.EADDRINUSE: "That port is already in use.",
            errno.EADDRNOTAVAIL: "That IP address can't be assigned to.",
        }
        error_text = error_messages.get(e.errno, e)
        self.stderr.write("Error: %s" % error_text)
        # Need to use an OS exit because sys.exit doesn't work in a thread
        os._exit(1)
    except KeyboardInterrupt:
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)
@@ -0,0 +1,40 @@
|
||||
import socket
|
||||
|
||||
from django.core.mail import mail_admins, mail_managers, send_mail
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that sends a test email to verify mail settings."""

    help = "Sends a test email to the email addresses specified as arguments."
    missing_args_message = "You must specify some email recipients, or pass the --managers or --admin options."

    def add_arguments(self, parser):
        parser.add_argument(
            'email', nargs='*',
            help='One or more email addresses to send a test email to.',
        )
        parser.add_argument(
            '--managers', action='store_true',
            help='Send a test email to the addresses specified in settings.MANAGERS.',
        )
        parser.add_argument(
            '--admins', action='store_true',
            help='Send a test email to the addresses specified in settings.ADMINS.',
        )

    def handle(self, *args, **kwargs):
        # Include host and timestamp so individual test emails are identifiable.
        subject = 'Test email from %s on %s' % (socket.gethostname(), timezone.now())

        send_mail(
            subject=subject,
            message="If you're reading this, it was successful.",
            from_email=None,
            recipient_list=kwargs['email'],
        )

        if kwargs['managers']:
            mail_managers(subject, "This email was sent to the site managers.")

        if kwargs['admins']:
            mail_admins(subject, "This email was sent to the site admins.")
115
venv/Lib/site-packages/django/core/management/commands/shell.py
Normal file
115
venv/Lib/site-packages/django/core/management/commands/shell.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import os
|
||||
import select
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from django.core.management import BaseCommand, CommandError
|
||||
from django.utils.datastructures import OrderedSet
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that opens an interactive Python shell."""

    help = (
        "Runs a Python interactive interpreter. Tries to use IPython or "
        "bpython, if one of them is available. Any standard input is executed "
        "as code."
    )

    requires_system_checks = []
    # Interpreter interfaces, tried in order of preference.
    shells = ['ipython', 'bpython', 'python']

    def add_arguments(self, parser):
        parser.add_argument(
            '--no-startup', action='store_true',
            help='When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.',
        )
        parser.add_argument(
            '-i', '--interface', choices=self.shells,
            help='Specify an interactive interpreter interface. Available options: "ipython", "bpython", and "python"',
        )
        parser.add_argument(
            '-c', '--command',
            help='Instead of opening an interactive shell, run a command as Django and exit.',
        )

    def ipython(self, options):
        from IPython import start_ipython
        start_ipython(argv=[])

    def bpython(self, options):
        import bpython
        bpython.embed()

    def python(self, options):
        import code

        # Set up a dictionary to serve as the environment for the shell.
        imported_objects = {}

        # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
        # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
        if not options['no_startup']:
            candidates = OrderedSet([
                os.environ.get("PYTHONSTARTUP"),
                os.path.expanduser('~/.pythonrc.py'),
            ])
            for pythonrc in candidates:
                if not pythonrc:
                    continue
                if not os.path.isfile(pythonrc):
                    continue
                with open(pythonrc) as handle:
                    pythonrc_code = handle.read()
                # Match the behavior of the cpython shell where an error in
                # PYTHONSTARTUP prints an exception and continues.
                try:
                    exec(compile(pythonrc_code, pythonrc, 'exec'), imported_objects)
                except Exception:
                    traceback.print_exc()

        # By default, this will set up readline to do tab completion and to read and
        # write history to the .python_history file, but this can be overridden by
        # $PYTHONSTARTUP or ~/.pythonrc.py.
        try:
            hook = sys.__interactivehook__
        except AttributeError:
            # Match the behavior of the cpython shell where a missing
            # sys.__interactivehook__ is ignored.
            pass
        else:
            try:
                hook()
            except Exception:
                # Match the behavior of the cpython shell where an error in
                # sys.__interactivehook__ prints a warning and the exception
                # and continues.
                print('Failed calling sys.__interactivehook__')
                traceback.print_exc()

        # Set up tab completion for objects imported by $PYTHONSTARTUP or
        # ~/.pythonrc.py.
        try:
            import readline
            import rlcompleter
            readline.set_completer(rlcompleter.Completer(imported_objects).complete)
        except ImportError:
            pass

        # Start the interactive interpreter.
        code.interact(local=imported_objects)

    def handle(self, **options):
        # Execute the command and exit.
        if options['command']:
            exec(options['command'], globals())
            return

        # Execute stdin if it has anything to read and exit.
        # Not supported on Windows due to select.select() limitations.
        if sys.platform != 'win32' and not sys.stdin.isatty() and select.select([sys.stdin], [], [], 0)[0]:
            exec(sys.stdin.read(), globals())
            return

        available_shells = [options['interface']] if options['interface'] else self.shells

        for shell in available_shells:
            try:
                return getattr(self, shell)(options)
            except ImportError:
                pass
        raise CommandError("Couldn't import {} interface.".format(shell))
@@ -0,0 +1,157 @@
|
||||
import sys
|
||||
|
||||
from django.apps import apps
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
from django.db.migrations.loader import MigrationLoader
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that prints the project's migration state."""

    help = "Shows all available migrations for the current project"

    def add_arguments(self, parser):
        parser.add_argument(
            'app_label', nargs='*',
            help='App labels of applications to limit the output to.',
        )
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help=(
                'Nominates a database to show migrations for. Defaults to the '
                '"default" database.'
            ),
        )

        # --list and --plan are mutually exclusive output formats.
        formats = parser.add_mutually_exclusive_group()
        formats.add_argument(
            '--list', '-l', action='store_const', dest='format', const='list',
            help=(
                'Shows a list of all migrations and which are applied. '
                'With a verbosity level of 2 or above, the applied datetimes '
                'will be included.'
            ),
        )
        formats.add_argument(
            '--plan', '-p', action='store_const', dest='format', const='plan',
            help=(
                'Shows all migrations in the order they will be applied. '
                'With a verbosity level of 2 or above all direct migration dependencies '
                'and reverse dependencies (run_before) will be included.'
            )
        )

        parser.set_defaults(format='list')

    def handle(self, *args, **options):
        self.verbosity = options['verbosity']

        # Get the database we're operating from
        connection = connections[options['database']]

        if options['format'] == "plan":
            return self.show_plan(connection, options['app_label'])
        return self.show_list(connection, options['app_label'])

    def _validate_app_names(self, loader, app_names):
        """Exit with status 2 if any requested app label is unknown."""
        has_bad_names = False
        for app_name in app_names:
            try:
                apps.get_app_config(app_name)
            except LookupError as err:
                self.stderr.write(str(err))
                has_bad_names = True
        if has_bad_names:
            sys.exit(2)

    def show_list(self, connection, app_names=None):
        """
        Show a list of all migrations on the system, or only those of
        some named apps.
        """
        # Load migrations from disk/DB
        loader = MigrationLoader(connection, ignore_no_migrations=True)
        recorder = MigrationRecorder(connection)
        recorded_migrations = recorder.applied_migrations()
        graph = loader.graph
        # If we were passed a list of apps, validate it
        if app_names:
            self._validate_app_names(loader, app_names)
        # Otherwise, show all apps in alphabetic order
        else:
            app_names = sorted(loader.migrated_apps)
        # For each app, print its migrations in order from oldest (roots) to
        # newest (leaves).
        for app_name in app_names:
            self.stdout.write(app_name, self.style.MIGRATE_LABEL)
            shown = set()
            for node in graph.leaf_nodes(app_name):
                for plan_node in graph.forwards_plan(node):
                    if plan_node not in shown and plan_node[0] == app_name:
                        # Give it a nice title if it's a squashed one
                        title = plan_node[1]
                        if graph.nodes[plan_node].replaces:
                            title += " (%s squashed migrations)" % len(graph.nodes[plan_node].replaces)
                        applied_migration = loader.applied_migrations.get(plan_node)
                        # Mark it as applied/unapplied
                        if applied_migration:
                            if plan_node in recorded_migrations:
                                output = ' [X] %s' % title
                            else:
                                title += " Run 'manage.py migrate' to finish recording."
                                output = ' [-] %s' % title
                            if self.verbosity >= 2 and hasattr(applied_migration, 'applied'):
                                output += ' (applied at %s)' % applied_migration.applied.strftime('%Y-%m-%d %H:%M:%S')
                            self.stdout.write(output)
                        else:
                            self.stdout.write(" [ ] %s" % title)
                        shown.add(plan_node)
            # If we didn't print anything, then a small message
            if not shown:
                self.stdout.write(" (no migrations)", self.style.ERROR)

    def show_plan(self, connection, app_names=None):
        """
        Show all known migrations (or only those of the specified app_names)
        in the order they will be applied.
        """
        # Load migrations from disk/DB
        loader = MigrationLoader(connection)
        graph = loader.graph
        if app_names:
            self._validate_app_names(loader, app_names)
            targets = [key for key in graph.leaf_nodes() if key[0] in app_names]
        else:
            targets = graph.leaf_nodes()
        plan = []
        seen = set()

        # Generate the plan
        for target in targets:
            for migration in graph.forwards_plan(target):
                if migration not in seen:
                    node = graph.node_map[migration]
                    plan.append(node)
                    seen.add(migration)

        # Output
        def print_deps(node):
            out = []
            for parent in sorted(node.parents):
                out.append("%s.%s" % parent.key)
            if out:
                return " ... (%s)" % ", ".join(out)
            return ""

        for node in plan:
            deps = ""
            if self.verbosity >= 2:
                deps = print_deps(node)
            if node.key in loader.applied_migrations:
                self.stdout.write("[X] %s.%s%s" % (node.key[0], node.key[1], deps))
            else:
                self.stdout.write("[ ] %s.%s%s" % (node.key[0], node.key[1], deps))
        if not plan:
            self.stdout.write('(no migrations)', self.style.ERROR)
@@ -0,0 +1,25 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.sql import sql_flush
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that prints the SQL needed to flush the database."""

    help = (
        "Returns a list of the SQL statements required to return all tables in "
        "the database to the state they were in just after they were installed."
    )

    # Wrap output in BEGIN/COMMIT.
    output_transaction = True

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to print the SQL for. Defaults to the "default" database.',
        )

    def handle(self, **options):
        sql_statements = sql_flush(self.style, connections[options['database']])
        if not sql_statements and options['verbosity'] >= 1:
            self.stderr.write('No tables found.')
        return '\n'.join(sql_statements)
@@ -0,0 +1,68 @@
|
||||
from django.apps import apps
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
from django.db.migrations.loader import AmbiguityError, MigrationLoader
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that prints the SQL for a single named migration."""

    help = "Prints the SQL statements for the named migration."

    output_transaction = True

    def add_arguments(self, parser):
        parser.add_argument('app_label', help='App label of the application containing the migration.')
        parser.add_argument('migration_name', help='Migration name to print the SQL for.')
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to create SQL for. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--backwards', action='store_true',
            help='Creates SQL to unapply the migration, rather than to apply it',
        )

    def execute(self, *args, **options):
        # sqlmigrate doesn't support coloring its output but we need to force
        # no_color=True so that the BEGIN/COMMIT statements added by
        # output_transaction don't get colored either.
        options['no_color'] = True
        return super().execute(*args, **options)

    def handle(self, *args, **options):
        # Get the database we're operating from
        connection = connections[options['database']]

        # Load up a loader to get all the migration data, but don't replace
        # migrations.
        loader = MigrationLoader(connection, replace_migrations=False)

        # Resolve command-line arguments into a migration
        app_label, migration_name = options['app_label'], options['migration_name']
        # Validate app_label
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        if app_label not in loader.migrated_apps:
            raise CommandError("App '%s' does not have migrations" % app_label)
        try:
            migration = loader.get_migration_by_prefix(app_label, migration_name)
        except AmbiguityError:
            raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (
                migration_name, app_label))
        except KeyError:
            raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (
                migration_name, app_label))
        target = (app_label, migration.name)

        # Show begin/end around output for atomic migrations, if the database
        # supports transactional DDL.
        self.output_transaction = migration.atomic and connection.features.can_rollback_ddl

        # Make a plan that represents just the requested migrations and show SQL
        # for it
        plan = [(loader.graph.nodes[target], options['backwards'])]
        sql_statements = loader.collect_sql(plan)
        if not sql_statements and options['verbosity'] >= 1:
            self.stderr.write('No operations found.')
        return '\n'.join(sql_statements)
@@ -0,0 +1,25 @@
|
||||
from django.core.management.base import AppCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
|
||||
|
||||
class Command(AppCommand):
    """Management command printing sequence-reset SQL for the named apps."""

    help = 'Prints the SQL statements for resetting sequences for the given app name(s).'

    output_transaction = True

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to print the SQL for. Defaults to the "default" database.',
        )

    def handle_app_config(self, app_config, **options):
        # Apps without a models module have no sequences to reset.
        if app_config.models_module is None:
            return
        connection = connections[options['database']]
        models = app_config.get_models(include_auto_created=True)
        statements = connection.ops.sequence_reset_sql(self.style, models)
        if not statements and options['verbosity'] >= 1:
            self.stderr.write('No sequences found.')
        return '\n'.join(statements)
@@ -0,0 +1,218 @@
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, connections, migrations
|
||||
from django.db.migrations.loader import AmbiguityError, MigrationLoader
|
||||
from django.db.migrations.migration import SwappableTuple
|
||||
from django.db.migrations.optimizer import MigrationOptimizer
|
||||
from django.db.migrations.writer import MigrationWriter
|
||||
from django.utils.version import get_docs_version
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that squashes a range of migrations into one."""

    help = "Squashes an existing set of migrations (from first until specified) into a single new one."

    def add_arguments(self, parser):
        parser.add_argument(
            'app_label',
            help='App label of the application to squash migrations for.',
        )
        parser.add_argument(
            'start_migration_name', nargs='?',
            help='Migrations will be squashed starting from and including this migration.',
        )
        parser.add_argument(
            'migration_name',
            help='Migrations will be squashed until and including this migration.',
        )
        parser.add_argument(
            '--no-optimize', action='store_true',
            help='Do not try to optimize the squashed operations.',
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--squashed-name',
            help='Sets the name of the new squashed migration.',
        )
        parser.add_argument(
            '--no-header', action='store_false', dest='include_header',
            help='Do not add a header comment to the new squashed migration.',
        )

    def handle(self, **options):
        self.verbosity = options['verbosity']
        self.interactive = options['interactive']
        app_label = options['app_label']
        start_migration_name = options['start_migration_name']
        migration_name = options['migration_name']
        no_optimize = options['no_optimize']
        squashed_name = options['squashed_name']
        include_header = options['include_header']
        # Validate app_label.
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        # Load the current graph state, check the app and migration they asked for exists
        loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
        if app_label not in loader.migrated_apps:
            raise CommandError(
                "App '%s' does not have migrations (so squashmigrations on "
                "it makes no sense)" % app_label
            )

        migration = self.find_migration(loader, app_label, migration_name)

        # Work out the list of predecessor migrations
        migrations_to_squash = [
            loader.get_migration(al, mn)
            for al, mn in loader.graph.forwards_plan((migration.app_label, migration.name))
            if al == migration.app_label
        ]

        if start_migration_name:
            start_migration = self.find_migration(loader, app_label, start_migration_name)
            start = loader.get_migration(start_migration.app_label, start_migration.name)
            try:
                start_index = migrations_to_squash.index(start)
                migrations_to_squash = migrations_to_squash[start_index:]
            except ValueError:
                raise CommandError(
                    "The migration '%s' cannot be found. Maybe it comes after "
                    "the migration '%s'?\n"
                    "Have a look at:\n"
                    "  python manage.py showmigrations %s\n"
                    "to debug this issue." % (start_migration, migration, app_label)
                )

        # Tell them what we're doing and optionally ask if we should proceed
        if self.verbosity > 0 or self.interactive:
            self.stdout.write(self.style.MIGRATE_HEADING("Will squash the following migrations:"))
            for migration in migrations_to_squash:
                self.stdout.write(" - %s" % migration.name)

            if self.interactive:
                answer = None
                while not answer or answer not in "yn":
                    answer = input("Do you wish to proceed? [yN] ")
                    if not answer:
                        answer = "n"
                        break
                    else:
                        answer = answer[0].lower()
                if answer != "y":
                    return

        # Load the operations from all those migrations and concat together,
        # along with collecting external dependencies and detecting
        # double-squashing
        operations = []
        dependencies = set()
        # We need to take all dependencies from the first migration in the list
        # as it may be 0002 depending on 0001
        first_migration = True
        for smigration in migrations_to_squash:
            if smigration.replaces:
                raise CommandError(
                    "You cannot squash squashed migrations! Please transition "
                    "it to a normal migration first: "
                    "https://docs.djangoproject.com/en/%s/topics/migrations/#squashing-migrations" % get_docs_version()
                )
            operations.extend(smigration.operations)
            for dependency in smigration.dependencies:
                if isinstance(dependency, SwappableTuple):
                    if settings.AUTH_USER_MODEL == dependency.setting:
                        dependencies.add(("__setting__", "AUTH_USER_MODEL"))
                    else:
                        dependencies.add(dependency)
                elif dependency[0] != smigration.app_label or first_migration:
                    dependencies.add(dependency)
            first_migration = False

        if no_optimize:
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("(Skipping optimization.)"))
            new_operations = operations
        else:
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("Optimizing..."))

            optimizer = MigrationOptimizer()
            new_operations = optimizer.optimize(operations, migration.app_label)

            if self.verbosity > 0:
                if len(new_operations) == len(operations):
                    self.stdout.write("  No optimizations possible.")
                else:
                    self.stdout.write(
                        "  Optimized from %s operations to %s operations." %
                        (len(operations), len(new_operations))
                    )

        # Work out the value of replaces (any squashed ones we're re-squashing)
        # need to feed their replaces into ours
        replaces = []
        for migration in migrations_to_squash:
            if migration.replaces:
                replaces.extend(migration.replaces)
            else:
                replaces.append((migration.app_label, migration.name))

        # Make a new migration with those operations
        subclass = type("Migration", (migrations.Migration,), {
            "dependencies": dependencies,
            "operations": new_operations,
            "replaces": replaces,
        })
        if start_migration_name:
            if squashed_name:
                # Use the name from --squashed-name.
                prefix, _ = start_migration.name.split('_', 1)
                name = '%s_%s' % (prefix, squashed_name)
            else:
                # Generate a name.
                name = '%s_squashed_%s' % (start_migration.name, migration.name)
            new_migration = subclass(name, app_label)
        else:
            name = '0001_%s' % (squashed_name or 'squashed_%s' % migration.name)
            new_migration = subclass(name, app_label)
            new_migration.initial = True

        # Write out the new migration file
        writer = MigrationWriter(new_migration, include_header)
        with open(writer.path, "w", encoding='utf-8') as fh:
            fh.write(writer.as_string())

        if self.verbosity > 0:
            self.stdout.write(
                self.style.MIGRATE_HEADING('Created new squashed migration %s' % writer.path) + '\n'
                '  You should commit this migration but leave the old ones in place;\n'
                '  the new migration will be used for new installs. Once you are sure\n'
                '  all instances of the codebase have applied the migrations you squashed,\n'
                '  you can delete them.'
            )
            if writer.needs_manual_porting:
                self.stdout.write(
                    self.style.MIGRATE_HEADING('Manual porting required') + '\n'
                    '  Your migrations contained functions that must be manually copied over,\n'
                    '  as we could not safely copy their implementation.\n'
                    '  See the comment at the top of the squashed migration for details.'
                )

    def find_migration(self, loader, app_label, name):
        """Resolve a (possibly abbreviated) migration name for app_label."""
        try:
            return loader.get_migration_by_prefix(app_label, name)
        except AmbiguityError:
            raise CommandError(
                "More than one migration matches '%s' in app '%s'. Please be "
                "more specific." % (name, app_label)
            )
        except KeyError:
            raise CommandError(
                "Cannot find a migration matching '%s' from app '%s'." %
                (name, app_label)
            )
@@ -0,0 +1,14 @@
|
||||
from django.core.management.templates import TemplateCommand
|
||||
|
||||
|
||||
class Command(TemplateCommand):
    """Management command that scaffolds a new Django app from a template."""

    help = (
        "Creates a Django app directory structure for the given app name in "
        "the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide an application name."

    def handle(self, **options):
        app_name = options.pop('name')
        target = options.pop('directory')
        super().handle('app', app_name, target, **options)
@@ -0,0 +1,21 @@
|
||||
from django.core.checks.security.base import SECRET_KEY_INSECURE_PREFIX
|
||||
from django.core.management.templates import TemplateCommand
|
||||
|
||||
from ..utils import get_random_secret_key
|
||||
|
||||
|
||||
class Command(TemplateCommand):
    """Management command that scaffolds a new Django project from a template."""

    help = (
        "Creates a Django project directory structure for the given project "
        "name in the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide a project name."

    def handle(self, **options):
        project_name = options.pop('name')
        target = options.pop('directory')

        # Create a random SECRET_KEY to put it in the main settings.
        options['secret_key'] = SECRET_KEY_INSECURE_PREFIX + get_random_secret_key()

        super().handle('project', project_name, target, **options)
@@ -0,0 +1,62 @@
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.utils import get_command_line_option
|
||||
from django.test.runner import get_max_test_processes
|
||||
from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that discovers and runs the project's tests."""

    help = 'Discover and run tests in the specified modules or the current directory.'

    # DiscoverRunner runs the checks after databases are set up.
    requires_system_checks = []
    test_runner = None

    def run_from_argv(self, argv):
        """
        Pre-parse the command line to extract the value of the --testrunner
        option. This allows a test runner to define additional command line
        arguments.
        """
        self.test_runner = get_command_line_option(argv, '--testrunner')
        super().run_from_argv(argv)

    def add_arguments(self, parser):
        parser.add_argument(
            'args', metavar='test_label', nargs='*',
            help='Module paths to test; can be modulename, modulename.TestCase or modulename.TestCase.test_method'
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--failfast', action='store_true',
            help='Tells Django to stop running the test suite after first failed test.',
        )
        parser.add_argument(
            '--testrunner',
            help='Tells Django to use specified test runner class instead of '
                 'the one specified by the TEST_RUNNER setting.',
        )

        # Let the selected runner contribute its own CLI options.
        test_runner_class = get_runner(settings, self.test_runner)

        if hasattr(test_runner_class, 'add_arguments'):
            test_runner_class.add_arguments(parser)

    def handle(self, *test_labels, **options):
        TestRunner = get_runner(settings, options['testrunner'])

        time_keeper = TimeKeeper() if options.get('timing', False) else NullTimeKeeper()
        parallel = options.get('parallel')
        if parallel == 'auto':
            options['parallel'] = get_max_test_processes()
        test_runner = TestRunner(**options)
        with time_keeper.timed('Total run'):
            failures = test_runner.run_tests(test_labels)
        time_keeper.print_results()
        if failures:
            sys.exit(1)
@@ -0,0 +1,54 @@
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that runs a dev server against a fixture-loaded test DB."""

    help = 'Runs a development server with data from the given fixture(s).'

    requires_system_checks = []

    def add_arguments(self, parser):
        parser.add_argument(
            'args', metavar='fixture', nargs='*',
            help='Path(s) to fixtures to load before running the server.',
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--addrport', default='',
            help='Port number or ipaddr:port to run the server on.',
        )
        parser.add_argument(
            '--ipv6', '-6', action='store_true', dest='use_ipv6',
            help='Tells Django to use an IPv6 address.',
        )

    def handle(self, *fixture_labels, **options):
        verbosity = options['verbosity']
        interactive = options['interactive']

        # Create a test database.
        db_name = connection.creation.create_test_db(verbosity=verbosity, autoclobber=not interactive, serialize=False)

        # Import the fixture data into the test database.
        call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})

        # Run the development server. Turn off auto-reloading because it causes
        # a strange error -- it causes this handle() method to be called
        # multiple times.
        shutdown_message = (
            '\nServer stopped.\nNote that the test database, %r, has not been '
            'deleted. You can explore it on your own.' % db_name
        )
        use_threading = connection.features.test_db_allows_multiple_connections
        call_command(
            'runserver',
            addrport=options['addrport'],
            shutdown_message=shutdown_message,
            use_reloader=False,
            use_ipv6=options['use_ipv6'],
            use_threading=use_threading
        )
53
venv/Lib/site-packages/django/core/management/sql.py
Normal file
53
venv/Lib/site-packages/django/core/management/sql.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import sys
|
||||
|
||||
from django.apps import apps
|
||||
from django.db import models
|
||||
|
||||
|
||||
def sql_flush(style, connection, reset_sequences=True, allow_cascade=False):
    """
    Return a list of the SQL statements used to flush the database.

    Only tables that actually exist in the database are considered; database
    views are skipped.
    """
    existing_tables = connection.introspection.django_table_names(
        only_existing=True,
        include_views=False,
    )
    return connection.ops.sql_flush(
        style,
        existing_tables,
        reset_sequences=reset_sequences,
        allow_cascade=allow_cascade,
    )
||||
|
||||
|
||||
def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs):
    """Send the pre_migrate signal for every installed app that has models."""
    out = kwargs.get('stdout', sys.stdout)
    for app_config in apps.get_app_configs():
        if app_config.models_module is None:
            # App ships no models module; nothing to migrate, no signal sent.
            continue
        if verbosity >= 2:
            out.write('Running pre-migrate handlers for application %s' % app_config.label)
        models.signals.pre_migrate.send(
            sender=app_config,
            app_config=app_config,
            verbosity=verbosity,
            interactive=interactive,
            using=db,
            **kwargs,
        )
|
||||
|
||||
def emit_post_migrate_signal(verbosity, interactive, db, **kwargs):
    """Send the post_migrate signal for every installed app that has models."""
    out = kwargs.get('stdout', sys.stdout)
    for app_config in apps.get_app_configs():
        if app_config.models_module is None:
            # App ships no models module; nothing to migrate, no signal sent.
            continue
        if verbosity >= 2:
            out.write('Running post-migrate handlers for application %s' % app_config.label)
        models.signals.post_migrate.send(
            sender=app_config,
            app_config=app_config,
            verbosity=verbosity,
            interactive=interactive,
            using=db,
            **kwargs,
        )
|
356
venv/Lib/site-packages/django/core/management/templates.py
Normal file
356
venv/Lib/site-packages/django/core/management/templates.py
Normal file
@@ -0,0 +1,356 @@
|
||||
import argparse
|
||||
import cgi
|
||||
import mimetypes
|
||||
import os
|
||||
import posixpath
|
||||
import shutil
|
||||
import stat
|
||||
import tempfile
|
||||
from importlib import import_module
|
||||
from urllib.request import urlretrieve
|
||||
|
||||
import django
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.utils import handle_extensions
|
||||
from django.template import Context, Engine
|
||||
from django.utils import archive
|
||||
from django.utils.version import get_docs_version
|
||||
|
||||
|
||||
class TemplateCommand(BaseCommand):
    """
    Copy either a Django application layout template or a Django project
    layout template into the specified directory.

    :param style: A color style object (see django.core.management.color).
    :param app_or_project: The string 'app' or 'project'.
    :param name: The name of the application or project.
    :param directory: The directory to which the template should be copied.
    :param options: The additional variables passed to project or app templates
    """
    requires_system_checks = []
    # The supported URL schemes
    url_schemes = ['http', 'https', 'ftp']
    # Rewrite the following suffixes when determining the target filename.
    rewrite_template_suffixes = (
        # Allow shipping invalid .py files without byte-compilation.
        ('.py-tpl', '.py'),
    )

    def add_arguments(self, parser):
        """Register the shared startapp/startproject command-line options."""
        parser.add_argument('name', help='Name of the application or project.')
        parser.add_argument('directory', nargs='?', help='Optional destination directory')
        parser.add_argument('--template', help='The path or URL to load the template from.')
        parser.add_argument(
            '--extension', '-e', dest='extensions',
            action='append', default=['py'],
            help='The file extension(s) to render (default: "py"). '
                 'Separate multiple extensions with commas, or use '
                 '-e multiple times.'
        )
        parser.add_argument(
            '--name', '-n', dest='files',
            action='append', default=[],
            help='The file name(s) to render. Separate multiple file names '
                 'with commas, or use -n multiple times.'
        )
        # default=argparse.SUPPRESS: when -x is never passed, 'exclude' is
        # absent from options entirely, which selects the default dot-dir
        # pruning branch in handle() below.
        parser.add_argument(
            '--exclude', '-x',
            action='append', default=argparse.SUPPRESS, nargs='?', const='',
            help=(
                'The directory name(s) to exclude, in addition to .git and '
                '__pycache__. Can be used multiple times.'
            ),
        )

    def handle(self, app_or_project, name, target=None, **options):
        """
        Copy the app/project template tree into the target directory,
        rendering selected files through the Django template engine.

        Raises CommandError on an invalid name, an existing/missing target
        directory, or a conflicting destination file.
        """
        self.app_or_project = app_or_project
        self.a_or_an = 'an' if app_or_project == 'app' else 'a'
        # Temp dirs/files created by download()/extract(); removed at the end.
        self.paths_to_remove = []
        self.verbosity = options['verbosity']

        self.validate_name(name)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = os.path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except FileExistsError:
                raise CommandError("'%s' already exists" % top_dir)
            except OSError as e:
                raise CommandError(e)
        else:
            top_dir = os.path.abspath(os.path.expanduser(target))
            if app_or_project == 'app':
                self.validate_name(os.path.basename(top_dir), 'directory')
            if not os.path.exists(top_dir):
                raise CommandError("Destination directory '%s' does not "
                                   "exist, please create it first." % top_dir)

        extensions = tuple(handle_extensions(options['extensions']))
        extra_files = []
        excluded_directories = ['.git', '__pycache__']
        for file in options['files']:
            extra_files.extend(map(lambda x: x.strip(), file.split(',')))
        if exclude := options.get('exclude'):
            for directory in exclude:
                excluded_directories.append(directory.strip())
        if self.verbosity >= 2:
            self.stdout.write(
                'Rendering %s template files with extensions: %s'
                % (app_or_project, ', '.join(extensions))
            )
            self.stdout.write(
                'Rendering %s template files with filenames: %s'
                % (app_or_project, ', '.join(extra_files))
            )
        # Placeholder identifiers such as 'app_name'/'project_name' that occur
        # in template paths and filenames are replaced with the real name.
        base_name = '%s_name' % app_or_project
        base_subdir = '%s_template' % app_or_project
        base_directory = '%s_directory' % app_or_project
        camel_case_name = 'camel_case_%s_name' % app_or_project
        camel_case_value = ''.join(x for x in name.title() if x != '_')

        context = Context({
            **options,
            base_name: name,
            base_directory: top_dir,
            camel_case_name: camel_case_value,
            'docs_version': get_docs_version(),
            'django_version': django.__version__,
        }, autoescape=False)

        # Setup a stub settings environment for template rendering
        if not settings.configured:
            settings.configure()
            django.setup()

        template_dir = self.handle_template(options['template'],
                                            base_subdir)
        prefix_length = len(template_dir) + 1

        for root, dirs, files in os.walk(template_dir):

            path_rest = root[prefix_length:]
            relative_dir = path_rest.replace(base_name, name)
            if relative_dir:
                target_dir = os.path.join(top_dir, relative_dir)
                os.makedirs(target_dir, exist_ok=True)

            # Prune directories in place so os.walk() never descends into them.
            for dirname in dirs[:]:
                if 'exclude' not in options:
                    # No -x given: fall back to skipping hidden dirs and caches.
                    if dirname.startswith('.') or dirname == '__pycache__':
                        dirs.remove(dirname)
                elif dirname in excluded_directories:
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith(('.pyo', '.pyc', '.py.class')):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = os.path.join(root, filename)
                new_path = os.path.join(
                    top_dir, relative_dir, filename.replace(base_name, name)
                )
                for old_suffix, new_suffix in self.rewrite_template_suffixes:
                    if new_path.endswith(old_suffix):
                        new_path = new_path[:-len(old_suffix)] + new_suffix
                        break  # Only rewrite once

                if os.path.exists(new_path):
                    raise CommandError(
                        "%s already exists. Overlaying %s %s into an existing "
                        "directory won't replace conflicting files." % (
                            new_path, self.a_or_an, app_or_project,
                        )
                    )

                # Only render the Python files, as we don't want to
                # accidentally render Django templates files
                if new_path.endswith(extensions) or filename in extra_files:
                    with open(old_path, encoding='utf-8') as template_file:
                        content = template_file.read()
                    template = Engine().from_string(content)
                    content = template.render(context)
                    with open(new_path, 'w', encoding='utf-8') as new_file:
                        new_file.write(content)
                else:
                    # Non-rendered files are copied through verbatim.
                    shutil.copyfile(old_path, new_path)

                if self.verbosity >= 2:
                    self.stdout.write('Creating %s' % new_path)
                try:
                    shutil.copymode(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    # Best-effort: some filesystems don't support chmod.
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path, self.style.NOTICE)

        if self.paths_to_remove:
            if self.verbosity >= 2:
                self.stdout.write('Cleaning up temporary files.')
            for path_to_remove in self.paths_to_remove:
                if os.path.isfile(path_to_remove):
                    os.remove(path_to_remove)
                else:
                    shutil.rmtree(path_to_remove)

    def handle_template(self, template, subdir):
        """
        Determine where the app or project templates are.
        Use django.__path__[0] as the default because the Django install
        directory isn't known.
        """
        if template is None:
            return os.path.join(django.__path__[0], 'conf', subdir)
        else:
            if template.startswith('file://'):
                template = template[7:]
            expanded_template = os.path.expanduser(template)
            expanded_template = os.path.normpath(expanded_template)
            if os.path.isdir(expanded_template):
                return expanded_template
            if self.is_url(template):
                # downloads the file and returns the path
                absolute_path = self.download(template)
            else:
                absolute_path = os.path.abspath(expanded_template)
            if os.path.exists(absolute_path):
                # Archives (tar/zip) are extracted to a temp dir.
                return self.extract(absolute_path)

        raise CommandError("couldn't handle %s template %s." %
                           (self.app_or_project, template))

    def validate_name(self, name, name_or_dir='name'):
        """
        Validate that *name* is a legal app/project name: a non-empty Python
        identifier that doesn't shadow an importable module.

        Raises CommandError otherwise.
        """
        if name is None:
            raise CommandError('you must provide {an} {app} name'.format(
                an=self.a_or_an,
                app=self.app_or_project,
            ))
        # Check it's a valid directory name.
        if not name.isidentifier():
            raise CommandError(
                "'{name}' is not a valid {app} {type}. Please make sure the "
                "{type} is a valid identifier.".format(
                    name=name,
                    app=self.app_or_project,
                    type=name_or_dir,
                )
            )
        # Check it cannot be imported.
        try:
            import_module(name)
        except ImportError:
            pass
        else:
            raise CommandError(
                "'{name}' conflicts with the name of an existing Python "
                "module and cannot be used as {an} {app} {type}. Please try "
                "another {type}.".format(
                    name=name,
                    an=self.a_or_an,
                    app=self.app_or_project,
                    type=name_or_dir,
                )
            )

    def download(self, url):
        """
        Download the given URL and return the file name.
        """
        def cleanup_url(url):
            # Split off the last path component as a candidate filename;
            # keep a tidy URL for verbose output.
            tmp = url.rstrip('/')
            filename = tmp.split('/')[-1]
            if url.endswith('/'):
                display_url = tmp + '/'
            else:
                display_url = url
            return filename, display_url

        prefix = 'django_%s_template_' % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
        self.paths_to_remove.append(tempdir)
        filename, display_url = cleanup_url(url)

        if self.verbosity >= 2:
            self.stdout.write('Downloading %s' % display_url)
        try:
            the_path, info = urlretrieve(url, os.path.join(tempdir, filename))
        except OSError as e:
            raise CommandError("couldn't download URL %s to %s: %s" %
                               (url, filename, e))

        used_name = the_path.split('/')[-1]

        # Trying to get better name from response headers
        # NOTE(review): the 'cgi' module is deprecated (PEP 594, removed in
        # Python 3.13) — confirm supported Python versions or migrate this
        # header parsing to an email.message-based equivalent.
        content_disposition = info.get('content-disposition')
        if content_disposition:
            _, params = cgi.parse_header(content_disposition)
            guessed_filename = params.get('filename') or used_name
        else:
            guessed_filename = used_name

        # Falling back to content type guessing
        ext = self.splitext(guessed_filename)[1]
        content_type = info.get('content-type')
        if not ext and content_type:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                guessed_filename += ext

        # Move the temporary file to a filename that has better
        # chances of being recognized by the archive utils
        if used_name != guessed_filename:
            guessed_path = os.path.join(tempdir, guessed_filename)
            shutil.move(the_path, guessed_path)
            return guessed_path

        # Giving up
        return the_path

    def splitext(self, the_path):
        """
        Like os.path.splitext, but takes off .tar, too
        """
        base, ext = posixpath.splitext(the_path)
        # Fold a trailing '.tar' into the extension so that e.g.
        # 'x.tar.gz' splits as ('x', '.tar.gz').
        if base.lower().endswith('.tar'):
            ext = base[-4:] + ext
            base = base[:-4]
        return base, ext

    def extract(self, filename):
        """
        Extract the given file to a temporary directory and return
        the path of the directory with the extracted content.
        """
        prefix = 'django_%s_template_' % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
        self.paths_to_remove.append(tempdir)
        if self.verbosity >= 2:
            self.stdout.write('Extracting %s' % filename)
        try:
            archive.extract(filename, tempdir)
            return tempdir
        except (archive.ArchiveException, OSError) as e:
            raise CommandError("couldn't extract file %s to %s: %s" %
                               (filename, tempdir, e))

    def is_url(self, template):
        """Return True if the name looks like a URL."""
        if ':' not in template:
            return False
        scheme = template.split(':', 1)[0].lower()
        return scheme in self.url_schemes

    def make_writeable(self, filename):
        """
        Make sure that the file is writeable.
        Useful if our source is read-only.
        """
        if not os.access(filename, os.W_OK):
            st = os.stat(filename)
            new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
            os.chmod(filename, new_permissions)
|
153
venv/Lib/site-packages/django/core/management/utils.py
Normal file
153
venv/Lib/site-packages/django/core/management/utils.py
Normal file
@@ -0,0 +1,153 @@
|
||||
import fnmatch
|
||||
import os
|
||||
from pathlib import Path
|
||||
from subprocess import PIPE, run
|
||||
|
||||
from django.apps import apps as installed_apps
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
|
||||
|
||||
from .base import CommandError, CommandParser
|
||||
|
||||
|
||||
def popen_wrapper(args, stdout_encoding='utf-8'):
    """
    Friendly wrapper around Popen.

    Return stdout output, stderr output, and OS status code.
    """
    try:
        completed = run(args, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt')
    except OSError as err:
        raise CommandError('Error executing %s' % args[0]) from err
    out_text = completed.stdout.decode(stdout_encoding)
    err_text = completed.stderr.decode(DEFAULT_LOCALE_ENCODING, errors='replace')
    return out_text, err_text, completed.returncode
||||
|
||||
|
||||
def handle_extensions(extensions):
    """
    Organize multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
    {'.html', '.js', '.py'}
    >>> handle_extensions(['.html, txt,.tpl'])
    {'.html', '.tpl', '.txt'}
    """
    collected = []
    for raw in extensions:
        # Each argument may itself hold a comma-separated list; spaces are
        # tolerated and stripped.
        collected.extend(raw.replace(' ', '').split(','))
    # Normalize every entry to carry a leading dot; the set drops duplicates.
    return {ext if ext.startswith('.') else '.' + ext for ext in collected}
||||
|
||||
|
||||
def find_command(cmd, path=None, pathext=None):
    """
    Locate *cmd* on *path* (defaults to $PATH) and return its full path,
    or None when not found. Windows-style executable suffixes from
    *pathext* (defaults to $PATHEXT) are tried as fallbacks.
    """
    if path is None:
        path = os.environ.get('PATH', '').split(os.pathsep)
    elif isinstance(path, str):
        path = [path]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
    # don't use extensions if the command ends with one of them
    if any(cmd.endswith(ext) for ext in pathext):
        pathext = ['']
    # check if we find the command on PATH
    for directory in path:
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate):
            return candidate
        for ext in pathext:
            with_ext = candidate + ext
            if os.path.isfile(with_ext):
                return with_ext
    return None
||||
|
||||
|
||||
def get_random_secret_key():
    """
    Return a 50 character random string usable as a SECRET_KEY setting value.
    """
    # Alphabet matches Django's historical SECRET_KEY character set.
    allowed_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    return get_random_string(50, allowed_chars)
||||
|
||||
|
||||
def parse_apps_and_model_labels(labels):
    """
    Parse a list of "app_label.ModelName" or "app_label" strings into actual
    objects and return a two-element tuple:
        (set of model classes, set of app_configs).
    Raise a CommandError if some specified models or apps don't exist.
    """
    app_configs = set()
    model_classes = set()

    for label in labels:
        if '.' not in label:
            # Bare app label: resolve the whole app config.
            try:
                app_configs.add(installed_apps.get_app_config(label))
            except LookupError as e:
                raise CommandError(str(e))
        else:
            # Dotted label: resolve a single model class.
            try:
                model_classes.add(installed_apps.get_model(label))
            except LookupError:
                raise CommandError('Unknown model: %s' % label)

    return model_classes, app_configs
||||
|
||||
|
||||
def get_command_line_option(argv, option):
    """
    Return the value of a command line option (which should include leading
    dashes, e.g. '--testrunner') from an argument list. Return None if the
    option wasn't passed or if the argument list couldn't be parsed.
    """
    # A minimal throwaway parser: no --help, no prefix abbreviation, and any
    # unknown arguments are simply ignored by parse_known_args().
    parser = CommandParser(add_help=False, allow_abbrev=False)
    parser.add_argument(option, dest='value')
    try:
        parsed, _ = parser.parse_known_args(argv[2:])
    except CommandError:
        return None
    return parsed.value
||||
|
||||
|
||||
def normalize_path_patterns(patterns):
    """Normalize an iterable of glob style patterns based on OS."""
    # A pattern ending in '<sep>*' targets a directory; strip that suffix so
    # the bare directory name can be matched. Both '/' and os.sep are honored.
    dir_suffixes = {'%s*' % sep for sep in {'/', os.sep}}

    def strip_dir_suffix(pattern):
        for suffix in dir_suffixes:
            if pattern.endswith(suffix):
                return pattern[:-len(suffix)]
        return pattern

    return [strip_dir_suffix(os.path.normcase(p)) for p in patterns]
||||
|
||||
|
||||
def is_ignored_path(path, ignore_patterns):
    """
    Check if the given path should be ignored or not based on matching
    one of the glob style `ignore_patterns`.
    """
    path_obj = Path(path)
    full_path = str(path_obj)
    # A pattern ignores the path when it matches either the final component
    # or the path as a whole (case-sensitively).
    return any(
        fnmatch.fnmatchcase(path_obj.name, pattern)
        or fnmatch.fnmatchcase(full_path, pattern)
        for pattern in normalize_path_patterns(ignore_patterns)
    )
|
Reference in New Issue
Block a user