Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +2 -0
- testbed/django__django/django/core/__init__.py +0 -0
- testbed/django__django/django/core/asgi.py +13 -0
- testbed/django__django/django/core/exceptions.py +254 -0
- testbed/django__django/django/core/files/temp.py +79 -0
- testbed/django__django/django/core/files/utils.py +78 -0
- testbed/django__django/django/core/mail/__init__.py +154 -0
- testbed/django__django/django/core/mail/backends/__init__.py +1 -0
- testbed/django__django/django/core/mail/backends/console.py +44 -0
- testbed/django__django/django/core/mail/backends/dummy.py +10 -0
- testbed/django__django/django/core/mail/backends/filebased.py +66 -0
- testbed/django__django/django/core/mail/backends/locmem.py +31 -0
- testbed/django__django/django/core/mail/utils.py +22 -0
- testbed/django__django/django/core/management/__init__.py +442 -0
- testbed/django__django/django/core/management/base.py +688 -0
- testbed/django__django/django/core/management/color.py +113 -0
- testbed/django__django/django/core/management/commands/__init__.py +0 -0
- testbed/django__django/django/core/management/commands/check.py +83 -0
- testbed/django__django/django/core/management/commands/compilemessages.py +195 -0
- testbed/django__django/django/core/management/commands/createcachetable.py +130 -0
- testbed/django__django/django/core/management/commands/diffsettings.py +91 -0
- testbed/django__django/django/core/management/commands/dumpdata.py +281 -0
- testbed/django__django/django/core/management/commands/flush.py +92 -0
- testbed/django__django/django/core/management/commands/inspectdb.py +414 -0
- testbed/django__django/django/core/management/commands/loaddata.py +432 -0
- testbed/django__django/django/core/management/commands/makemessages.py +783 -0
- testbed/django__django/django/core/management/commands/makemigrations.py +513 -0
- testbed/django__django/django/core/management/commands/migrate.py +511 -0
- testbed/django__django/django/core/management/commands/optimizemigration.py +129 -0
- testbed/django__django/django/core/management/commands/runserver.py +186 -0
- testbed/django__django/django/core/management/commands/shell.py +139 -0
- testbed/django__django/django/core/management/commands/showmigrations.py +176 -0
- testbed/django__django/django/core/management/commands/sqlflush.py +29 -0
- testbed/django__django/django/core/management/commands/sqlmigrate.py +83 -0
- testbed/django__django/django/core/management/commands/sqlsequencereset.py +31 -0
- testbed/django__django/django/core/management/commands/squashmigrations.py +267 -0
- testbed/django__django/django/core/management/commands/startapp.py +14 -0
- testbed/django__django/django/core/management/commands/startproject.py +21 -0
- testbed/django__django/django/core/management/commands/test.py +71 -0
- testbed/django__django/django/core/management/commands/testserver.py +65 -0
- testbed/django__django/django/core/management/sql.py +59 -0
- testbed/django__django/django/core/management/templates.py +406 -0
- testbed/django__django/django/core/management/utils.py +175 -0
- testbed/django__django/django/core/paginator.py +238 -0
- testbed/django__django/django/core/serializers/__init__.py +254 -0
- testbed/django__django/django/core/serializers/base.py +386 -0
- testbed/django__django/django/core/serializers/json.py +106 -0
- testbed/django__django/django/core/serializers/jsonl.py +57 -0
- testbed/django__django/django/core/serializers/python.py +192 -0
- testbed/django__django/django/core/serializers/pyyaml.py +82 -0
.gitattributes
CHANGED
|
@@ -98,3 +98,5 @@ testbed/pydicom__pydicom/pydicom/data/test_files/OT-PAL-8-face.dcm filter=lfs di
|
|
| 98 |
testbed/pydicom__pydicom/pydicom/data/test_files/SC_rgb_32bit.dcm filter=lfs diff=lfs merge=lfs -text
|
| 99 |
testbed/pydicom__pydicom/pydicom/data/test_files/OBXXXX1A.dcm filter=lfs diff=lfs merge=lfs -text
|
| 100 |
testbed/pvlib__pvlib-python/pvlib/data/aod550_tcwv_20121101_test.nc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 98 |
testbed/pydicom__pydicom/pydicom/data/test_files/SC_rgb_32bit.dcm filter=lfs diff=lfs merge=lfs -text
|
| 99 |
testbed/pydicom__pydicom/pydicom/data/test_files/OBXXXX1A.dcm filter=lfs diff=lfs merge=lfs -text
|
| 100 |
testbed/pvlib__pvlib-python/pvlib/data/aod550_tcwv_20121101_test.nc filter=lfs diff=lfs merge=lfs -text
|
| 101 |
+
testbed/mwaskom__seaborn/doc/_static/favicon_old.ico filter=lfs diff=lfs merge=lfs -text
|
| 102 |
+
testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.pdf filter=lfs diff=lfs merge=lfs -text
|
testbed/django__django/django/core/__init__.py
ADDED
|
File without changes
|
testbed/django__django/django/core/asgi.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import django
|
| 2 |
+
from django.core.handlers.asgi import ASGIHandler
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def get_asgi_application():
|
| 6 |
+
"""
|
| 7 |
+
The public interface to Django's ASGI support. Return an ASGI 3 callable.
|
| 8 |
+
|
| 9 |
+
Avoids making django.core.handlers.ASGIHandler a public API, in case the
|
| 10 |
+
internal implementation changes or moves in the future.
|
| 11 |
+
"""
|
| 12 |
+
django.setup(set_prefix=False)
|
| 13 |
+
return ASGIHandler()
|
testbed/django__django/django/core/exceptions.py
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Global Django exception and warning classes.
|
| 3 |
+
"""
|
| 4 |
+
import operator
|
| 5 |
+
|
| 6 |
+
from django.utils.hashable import make_hashable
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class FieldDoesNotExist(Exception):
|
| 10 |
+
"""The requested model field does not exist"""
|
| 11 |
+
|
| 12 |
+
pass
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class AppRegistryNotReady(Exception):
|
| 16 |
+
"""The django.apps registry is not populated yet"""
|
| 17 |
+
|
| 18 |
+
pass
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class ObjectDoesNotExist(Exception):
|
| 22 |
+
"""The requested object does not exist"""
|
| 23 |
+
|
| 24 |
+
silent_variable_failure = True
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class MultipleObjectsReturned(Exception):
|
| 28 |
+
"""The query returned multiple objects when only one was expected."""
|
| 29 |
+
|
| 30 |
+
pass
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class SuspiciousOperation(Exception):
|
| 34 |
+
"""The user did something suspicious"""
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class SuspiciousMultipartForm(SuspiciousOperation):
|
| 38 |
+
"""Suspect MIME request in multipart form data"""
|
| 39 |
+
|
| 40 |
+
pass
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class SuspiciousFileOperation(SuspiciousOperation):
|
| 44 |
+
"""A Suspicious filesystem operation was attempted"""
|
| 45 |
+
|
| 46 |
+
pass
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class DisallowedHost(SuspiciousOperation):
|
| 50 |
+
"""HTTP_HOST header contains invalid value"""
|
| 51 |
+
|
| 52 |
+
pass
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class DisallowedRedirect(SuspiciousOperation):
|
| 56 |
+
"""Redirect to scheme not in allowed list"""
|
| 57 |
+
|
| 58 |
+
pass
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class TooManyFieldsSent(SuspiciousOperation):
|
| 62 |
+
"""
|
| 63 |
+
The number of fields in a GET or POST request exceeded
|
| 64 |
+
settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
pass
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class TooManyFilesSent(SuspiciousOperation):
|
| 71 |
+
"""
|
| 72 |
+
The number of fields in a GET or POST request exceeded
|
| 73 |
+
settings.DATA_UPLOAD_MAX_NUMBER_FILES.
|
| 74 |
+
"""
|
| 75 |
+
|
| 76 |
+
pass
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class RequestDataTooBig(SuspiciousOperation):
|
| 80 |
+
"""
|
| 81 |
+
The size of the request (excluding any file uploads) exceeded
|
| 82 |
+
settings.DATA_UPLOAD_MAX_MEMORY_SIZE.
|
| 83 |
+
"""
|
| 84 |
+
|
| 85 |
+
pass
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class RequestAborted(Exception):
|
| 89 |
+
"""The request was closed before it was completed, or timed out."""
|
| 90 |
+
|
| 91 |
+
pass
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class BadRequest(Exception):
|
| 95 |
+
"""The request is malformed and cannot be processed."""
|
| 96 |
+
|
| 97 |
+
pass
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class PermissionDenied(Exception):
|
| 101 |
+
"""The user did not have permission to do that"""
|
| 102 |
+
|
| 103 |
+
pass
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class ViewDoesNotExist(Exception):
|
| 107 |
+
"""The requested view does not exist"""
|
| 108 |
+
|
| 109 |
+
pass
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class MiddlewareNotUsed(Exception):
|
| 113 |
+
"""This middleware is not used in this server configuration"""
|
| 114 |
+
|
| 115 |
+
pass
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class ImproperlyConfigured(Exception):
|
| 119 |
+
"""Django is somehow improperly configured"""
|
| 120 |
+
|
| 121 |
+
pass
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
class FieldError(Exception):
|
| 125 |
+
"""Some kind of problem with a model field."""
|
| 126 |
+
|
| 127 |
+
pass
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
NON_FIELD_ERRORS = "__all__"
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class ValidationError(Exception):
|
| 134 |
+
"""An error while validating data."""
|
| 135 |
+
|
| 136 |
+
def __init__(self, message, code=None, params=None):
|
| 137 |
+
"""
|
| 138 |
+
The `message` argument can be a single error, a list of errors, or a
|
| 139 |
+
dictionary that maps field names to lists of errors. What we define as
|
| 140 |
+
an "error" can be either a simple string or an instance of
|
| 141 |
+
ValidationError with its message attribute set, and what we define as
|
| 142 |
+
list or dictionary can be an actual `list` or `dict` or an instance
|
| 143 |
+
of ValidationError with its `error_list` or `error_dict` attribute set.
|
| 144 |
+
"""
|
| 145 |
+
super().__init__(message, code, params)
|
| 146 |
+
|
| 147 |
+
if isinstance(message, ValidationError):
|
| 148 |
+
if hasattr(message, "error_dict"):
|
| 149 |
+
message = message.error_dict
|
| 150 |
+
elif not hasattr(message, "message"):
|
| 151 |
+
message = message.error_list
|
| 152 |
+
else:
|
| 153 |
+
message, code, params = message.message, message.code, message.params
|
| 154 |
+
|
| 155 |
+
if isinstance(message, dict):
|
| 156 |
+
self.error_dict = {}
|
| 157 |
+
for field, messages in message.items():
|
| 158 |
+
if not isinstance(messages, ValidationError):
|
| 159 |
+
messages = ValidationError(messages)
|
| 160 |
+
self.error_dict[field] = messages.error_list
|
| 161 |
+
|
| 162 |
+
elif isinstance(message, list):
|
| 163 |
+
self.error_list = []
|
| 164 |
+
for message in message:
|
| 165 |
+
# Normalize plain strings to instances of ValidationError.
|
| 166 |
+
if not isinstance(message, ValidationError):
|
| 167 |
+
message = ValidationError(message)
|
| 168 |
+
if hasattr(message, "error_dict"):
|
| 169 |
+
self.error_list.extend(sum(message.error_dict.values(), []))
|
| 170 |
+
else:
|
| 171 |
+
self.error_list.extend(message.error_list)
|
| 172 |
+
|
| 173 |
+
else:
|
| 174 |
+
self.message = message
|
| 175 |
+
self.code = code
|
| 176 |
+
self.params = params
|
| 177 |
+
self.error_list = [self]
|
| 178 |
+
|
| 179 |
+
@property
|
| 180 |
+
def message_dict(self):
|
| 181 |
+
# Trigger an AttributeError if this ValidationError
|
| 182 |
+
# doesn't have an error_dict.
|
| 183 |
+
getattr(self, "error_dict")
|
| 184 |
+
|
| 185 |
+
return dict(self)
|
| 186 |
+
|
| 187 |
+
@property
|
| 188 |
+
def messages(self):
|
| 189 |
+
if hasattr(self, "error_dict"):
|
| 190 |
+
return sum(dict(self).values(), [])
|
| 191 |
+
return list(self)
|
| 192 |
+
|
| 193 |
+
def update_error_dict(self, error_dict):
|
| 194 |
+
if hasattr(self, "error_dict"):
|
| 195 |
+
for field, error_list in self.error_dict.items():
|
| 196 |
+
error_dict.setdefault(field, []).extend(error_list)
|
| 197 |
+
else:
|
| 198 |
+
error_dict.setdefault(NON_FIELD_ERRORS, []).extend(self.error_list)
|
| 199 |
+
return error_dict
|
| 200 |
+
|
| 201 |
+
def __iter__(self):
|
| 202 |
+
if hasattr(self, "error_dict"):
|
| 203 |
+
for field, errors in self.error_dict.items():
|
| 204 |
+
yield field, list(ValidationError(errors))
|
| 205 |
+
else:
|
| 206 |
+
for error in self.error_list:
|
| 207 |
+
message = error.message
|
| 208 |
+
if error.params:
|
| 209 |
+
message %= error.params
|
| 210 |
+
yield str(message)
|
| 211 |
+
|
| 212 |
+
def __str__(self):
|
| 213 |
+
if hasattr(self, "error_dict"):
|
| 214 |
+
return repr(dict(self))
|
| 215 |
+
return repr(list(self))
|
| 216 |
+
|
| 217 |
+
def __repr__(self):
|
| 218 |
+
return "ValidationError(%s)" % self
|
| 219 |
+
|
| 220 |
+
def __eq__(self, other):
|
| 221 |
+
if not isinstance(other, ValidationError):
|
| 222 |
+
return NotImplemented
|
| 223 |
+
return hash(self) == hash(other)
|
| 224 |
+
|
| 225 |
+
def __hash__(self):
|
| 226 |
+
if hasattr(self, "message"):
|
| 227 |
+
return hash(
|
| 228 |
+
(
|
| 229 |
+
self.message,
|
| 230 |
+
self.code,
|
| 231 |
+
make_hashable(self.params),
|
| 232 |
+
)
|
| 233 |
+
)
|
| 234 |
+
if hasattr(self, "error_dict"):
|
| 235 |
+
return hash(make_hashable(self.error_dict))
|
| 236 |
+
return hash(tuple(sorted(self.error_list, key=operator.attrgetter("message"))))
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class EmptyResultSet(Exception):
|
| 240 |
+
"""A database query predicate is impossible."""
|
| 241 |
+
|
| 242 |
+
pass
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class FullResultSet(Exception):
|
| 246 |
+
"""A database query predicate is matches everything."""
|
| 247 |
+
|
| 248 |
+
pass
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class SynchronousOnlyOperation(Exception):
|
| 252 |
+
"""The user tried to call a sync-only function from an async context."""
|
| 253 |
+
|
| 254 |
+
pass
|
testbed/django__django/django/core/files/temp.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The temp module provides a NamedTemporaryFile that can be reopened in the same
|
| 3 |
+
process on any platform. Most platforms use the standard Python
|
| 4 |
+
tempfile.NamedTemporaryFile class, but Windows users are given a custom class.
|
| 5 |
+
|
| 6 |
+
This is needed because the Python implementation of NamedTemporaryFile uses the
|
| 7 |
+
O_TEMPORARY flag under Windows, which prevents the file from being reopened
|
| 8 |
+
if the same flag is not provided [1][2]. Note that this does not address the
|
| 9 |
+
more general issue of opening a file for writing and reading in multiple
|
| 10 |
+
processes in a manner that works across platforms.
|
| 11 |
+
|
| 12 |
+
The custom version of NamedTemporaryFile doesn't support the same keyword
|
| 13 |
+
arguments available in tempfile.NamedTemporaryFile.
|
| 14 |
+
|
| 15 |
+
1: https://mail.python.org/pipermail/python-list/2005-December/336957.html
|
| 16 |
+
2: https://bugs.python.org/issue14243
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import os
|
| 20 |
+
import tempfile
|
| 21 |
+
|
| 22 |
+
from django.core.files.utils import FileProxyMixin
|
| 23 |
+
|
| 24 |
+
__all__ = (
|
| 25 |
+
"NamedTemporaryFile",
|
| 26 |
+
"gettempdir",
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
if os.name == "nt":
|
| 31 |
+
|
| 32 |
+
class TemporaryFile(FileProxyMixin):
|
| 33 |
+
"""
|
| 34 |
+
Temporary file object constructor that supports reopening of the
|
| 35 |
+
temporary file in Windows.
|
| 36 |
+
|
| 37 |
+
Unlike tempfile.NamedTemporaryFile from the standard library,
|
| 38 |
+
__init__() doesn't support the 'delete', 'buffering', 'encoding', or
|
| 39 |
+
'newline' keyword arguments.
|
| 40 |
+
"""
|
| 41 |
+
|
| 42 |
+
def __init__(self, mode="w+b", bufsize=-1, suffix="", prefix="", dir=None):
|
| 43 |
+
fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
|
| 44 |
+
self.name = name
|
| 45 |
+
self.file = os.fdopen(fd, mode, bufsize)
|
| 46 |
+
self.close_called = False
|
| 47 |
+
|
| 48 |
+
# Because close can be called during shutdown
|
| 49 |
+
# we need to cache os.unlink and access it
|
| 50 |
+
# as self.unlink only
|
| 51 |
+
unlink = os.unlink
|
| 52 |
+
|
| 53 |
+
def close(self):
|
| 54 |
+
if not self.close_called:
|
| 55 |
+
self.close_called = True
|
| 56 |
+
try:
|
| 57 |
+
self.file.close()
|
| 58 |
+
except OSError:
|
| 59 |
+
pass
|
| 60 |
+
try:
|
| 61 |
+
self.unlink(self.name)
|
| 62 |
+
except OSError:
|
| 63 |
+
pass
|
| 64 |
+
|
| 65 |
+
def __del__(self):
|
| 66 |
+
self.close()
|
| 67 |
+
|
| 68 |
+
def __enter__(self):
|
| 69 |
+
self.file.__enter__()
|
| 70 |
+
return self
|
| 71 |
+
|
| 72 |
+
def __exit__(self, exc, value, tb):
|
| 73 |
+
self.file.__exit__(exc, value, tb)
|
| 74 |
+
|
| 75 |
+
NamedTemporaryFile = TemporaryFile
|
| 76 |
+
else:
|
| 77 |
+
NamedTemporaryFile = tempfile.NamedTemporaryFile
|
| 78 |
+
|
| 79 |
+
gettempdir = tempfile.gettempdir
|
testbed/django__django/django/core/files/utils.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pathlib
|
| 3 |
+
|
| 4 |
+
from django.core.exceptions import SuspiciousFileOperation
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def validate_file_name(name, allow_relative_path=False):
|
| 8 |
+
# Remove potentially dangerous names
|
| 9 |
+
if os.path.basename(name) in {"", ".", ".."}:
|
| 10 |
+
raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)
|
| 11 |
+
|
| 12 |
+
if allow_relative_path:
|
| 13 |
+
# Use PurePosixPath() because this branch is checked only in
|
| 14 |
+
# FileField.generate_filename() where all file paths are expected to be
|
| 15 |
+
# Unix style (with forward slashes).
|
| 16 |
+
path = pathlib.PurePosixPath(name)
|
| 17 |
+
if path.is_absolute() or ".." in path.parts:
|
| 18 |
+
raise SuspiciousFileOperation(
|
| 19 |
+
"Detected path traversal attempt in '%s'" % name
|
| 20 |
+
)
|
| 21 |
+
elif name != os.path.basename(name):
|
| 22 |
+
raise SuspiciousFileOperation("File name '%s' includes path elements" % name)
|
| 23 |
+
|
| 24 |
+
return name
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class FileProxyMixin:
|
| 28 |
+
"""
|
| 29 |
+
A mixin class used to forward file methods to an underlying file
|
| 30 |
+
object. The internal file object has to be called "file"::
|
| 31 |
+
|
| 32 |
+
class FileProxy(FileProxyMixin):
|
| 33 |
+
def __init__(self, file):
|
| 34 |
+
self.file = file
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
encoding = property(lambda self: self.file.encoding)
|
| 38 |
+
fileno = property(lambda self: self.file.fileno)
|
| 39 |
+
flush = property(lambda self: self.file.flush)
|
| 40 |
+
isatty = property(lambda self: self.file.isatty)
|
| 41 |
+
newlines = property(lambda self: self.file.newlines)
|
| 42 |
+
read = property(lambda self: self.file.read)
|
| 43 |
+
readinto = property(lambda self: self.file.readinto)
|
| 44 |
+
readline = property(lambda self: self.file.readline)
|
| 45 |
+
readlines = property(lambda self: self.file.readlines)
|
| 46 |
+
seek = property(lambda self: self.file.seek)
|
| 47 |
+
tell = property(lambda self: self.file.tell)
|
| 48 |
+
truncate = property(lambda self: self.file.truncate)
|
| 49 |
+
write = property(lambda self: self.file.write)
|
| 50 |
+
writelines = property(lambda self: self.file.writelines)
|
| 51 |
+
|
| 52 |
+
@property
|
| 53 |
+
def closed(self):
|
| 54 |
+
return not self.file or self.file.closed
|
| 55 |
+
|
| 56 |
+
def readable(self):
|
| 57 |
+
if self.closed:
|
| 58 |
+
return False
|
| 59 |
+
if hasattr(self.file, "readable"):
|
| 60 |
+
return self.file.readable()
|
| 61 |
+
return True
|
| 62 |
+
|
| 63 |
+
def writable(self):
|
| 64 |
+
if self.closed:
|
| 65 |
+
return False
|
| 66 |
+
if hasattr(self.file, "writable"):
|
| 67 |
+
return self.file.writable()
|
| 68 |
+
return "w" in getattr(self.file, "mode", "")
|
| 69 |
+
|
| 70 |
+
def seekable(self):
|
| 71 |
+
if self.closed:
|
| 72 |
+
return False
|
| 73 |
+
if hasattr(self.file, "seekable"):
|
| 74 |
+
return self.file.seekable()
|
| 75 |
+
return True
|
| 76 |
+
|
| 77 |
+
def __iter__(self):
|
| 78 |
+
return iter(self.file)
|
testbed/django__django/django/core/mail/__init__.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tools for sending email.
|
| 3 |
+
"""
|
| 4 |
+
from django.conf import settings
|
| 5 |
+
|
| 6 |
+
# Imported for backwards compatibility and for the sake
|
| 7 |
+
# of a cleaner namespace. These symbols used to be in
|
| 8 |
+
# django/core/mail.py before the introduction of email
|
| 9 |
+
# backends and the subsequent reorganization (See #10355)
|
| 10 |
+
from django.core.mail.message import (
|
| 11 |
+
DEFAULT_ATTACHMENT_MIME_TYPE,
|
| 12 |
+
BadHeaderError,
|
| 13 |
+
EmailMessage,
|
| 14 |
+
EmailMultiAlternatives,
|
| 15 |
+
SafeMIMEMultipart,
|
| 16 |
+
SafeMIMEText,
|
| 17 |
+
forbid_multi_line_headers,
|
| 18 |
+
make_msgid,
|
| 19 |
+
)
|
| 20 |
+
from django.core.mail.utils import DNS_NAME, CachedDnsName
|
| 21 |
+
from django.utils.module_loading import import_string
|
| 22 |
+
|
| 23 |
+
__all__ = [
|
| 24 |
+
"CachedDnsName",
|
| 25 |
+
"DNS_NAME",
|
| 26 |
+
"EmailMessage",
|
| 27 |
+
"EmailMultiAlternatives",
|
| 28 |
+
"SafeMIMEText",
|
| 29 |
+
"SafeMIMEMultipart",
|
| 30 |
+
"DEFAULT_ATTACHMENT_MIME_TYPE",
|
| 31 |
+
"make_msgid",
|
| 32 |
+
"BadHeaderError",
|
| 33 |
+
"forbid_multi_line_headers",
|
| 34 |
+
"get_connection",
|
| 35 |
+
"send_mail",
|
| 36 |
+
"send_mass_mail",
|
| 37 |
+
"mail_admins",
|
| 38 |
+
"mail_managers",
|
| 39 |
+
]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def get_connection(backend=None, fail_silently=False, **kwds):
|
| 43 |
+
"""Load an email backend and return an instance of it.
|
| 44 |
+
|
| 45 |
+
If backend is None (default), use settings.EMAIL_BACKEND.
|
| 46 |
+
|
| 47 |
+
Both fail_silently and other keyword arguments are used in the
|
| 48 |
+
constructor of the backend.
|
| 49 |
+
"""
|
| 50 |
+
klass = import_string(backend or settings.EMAIL_BACKEND)
|
| 51 |
+
return klass(fail_silently=fail_silently, **kwds)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def send_mail(
|
| 55 |
+
subject,
|
| 56 |
+
message,
|
| 57 |
+
from_email,
|
| 58 |
+
recipient_list,
|
| 59 |
+
fail_silently=False,
|
| 60 |
+
auth_user=None,
|
| 61 |
+
auth_password=None,
|
| 62 |
+
connection=None,
|
| 63 |
+
html_message=None,
|
| 64 |
+
):
|
| 65 |
+
"""
|
| 66 |
+
Easy wrapper for sending a single message to a recipient list. All members
|
| 67 |
+
of the recipient list will see the other recipients in the 'To' field.
|
| 68 |
+
|
| 69 |
+
If from_email is None, use the DEFAULT_FROM_EMAIL setting.
|
| 70 |
+
If auth_user is None, use the EMAIL_HOST_USER setting.
|
| 71 |
+
If auth_password is None, use the EMAIL_HOST_PASSWORD setting.
|
| 72 |
+
|
| 73 |
+
Note: The API for this method is frozen. New code wanting to extend the
|
| 74 |
+
functionality should use the EmailMessage class directly.
|
| 75 |
+
"""
|
| 76 |
+
connection = connection or get_connection(
|
| 77 |
+
username=auth_user,
|
| 78 |
+
password=auth_password,
|
| 79 |
+
fail_silently=fail_silently,
|
| 80 |
+
)
|
| 81 |
+
mail = EmailMultiAlternatives(
|
| 82 |
+
subject, message, from_email, recipient_list, connection=connection
|
| 83 |
+
)
|
| 84 |
+
if html_message:
|
| 85 |
+
mail.attach_alternative(html_message, "text/html")
|
| 86 |
+
|
| 87 |
+
return mail.send()
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def send_mass_mail(
|
| 91 |
+
datatuple, fail_silently=False, auth_user=None, auth_password=None, connection=None
|
| 92 |
+
):
|
| 93 |
+
"""
|
| 94 |
+
Given a datatuple of (subject, message, from_email, recipient_list), send
|
| 95 |
+
each message to each recipient list. Return the number of emails sent.
|
| 96 |
+
|
| 97 |
+
If from_email is None, use the DEFAULT_FROM_EMAIL setting.
|
| 98 |
+
If auth_user and auth_password are set, use them to log in.
|
| 99 |
+
If auth_user is None, use the EMAIL_HOST_USER setting.
|
| 100 |
+
If auth_password is None, use the EMAIL_HOST_PASSWORD setting.
|
| 101 |
+
|
| 102 |
+
Note: The API for this method is frozen. New code wanting to extend the
|
| 103 |
+
functionality should use the EmailMessage class directly.
|
| 104 |
+
"""
|
| 105 |
+
connection = connection or get_connection(
|
| 106 |
+
username=auth_user,
|
| 107 |
+
password=auth_password,
|
| 108 |
+
fail_silently=fail_silently,
|
| 109 |
+
)
|
| 110 |
+
messages = [
|
| 111 |
+
EmailMessage(subject, message, sender, recipient, connection=connection)
|
| 112 |
+
for subject, message, sender, recipient in datatuple
|
| 113 |
+
]
|
| 114 |
+
return connection.send_messages(messages)
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def mail_admins(
|
| 118 |
+
subject, message, fail_silently=False, connection=None, html_message=None
|
| 119 |
+
):
|
| 120 |
+
"""Send a message to the admins, as defined by the ADMINS setting."""
|
| 121 |
+
if not settings.ADMINS:
|
| 122 |
+
return
|
| 123 |
+
if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.ADMINS):
|
| 124 |
+
raise ValueError("The ADMINS setting must be a list of 2-tuples.")
|
| 125 |
+
mail = EmailMultiAlternatives(
|
| 126 |
+
"%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject),
|
| 127 |
+
message,
|
| 128 |
+
settings.SERVER_EMAIL,
|
| 129 |
+
[a[1] for a in settings.ADMINS],
|
| 130 |
+
connection=connection,
|
| 131 |
+
)
|
| 132 |
+
if html_message:
|
| 133 |
+
mail.attach_alternative(html_message, "text/html")
|
| 134 |
+
mail.send(fail_silently=fail_silently)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def mail_managers(
|
| 138 |
+
subject, message, fail_silently=False, connection=None, html_message=None
|
| 139 |
+
):
|
| 140 |
+
"""Send a message to the managers, as defined by the MANAGERS setting."""
|
| 141 |
+
if not settings.MANAGERS:
|
| 142 |
+
return
|
| 143 |
+
if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.MANAGERS):
|
| 144 |
+
raise ValueError("The MANAGERS setting must be a list of 2-tuples.")
|
| 145 |
+
mail = EmailMultiAlternatives(
|
| 146 |
+
"%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject),
|
| 147 |
+
message,
|
| 148 |
+
settings.SERVER_EMAIL,
|
| 149 |
+
[a[1] for a in settings.MANAGERS],
|
| 150 |
+
connection=connection,
|
| 151 |
+
)
|
| 152 |
+
if html_message:
|
| 153 |
+
mail.attach_alternative(html_message, "text/html")
|
| 154 |
+
mail.send(fail_silently=fail_silently)
|
testbed/django__django/django/core/mail/backends/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Mail backends shipped with Django.
|
testbed/django__django/django/core/mail/backends/console.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Email backend that writes messages to console instead of sending them.
|
| 3 |
+
"""
|
| 4 |
+
import sys
|
| 5 |
+
import threading
|
| 6 |
+
|
| 7 |
+
from django.core.mail.backends.base import BaseEmailBackend
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class EmailBackend(BaseEmailBackend):
|
| 11 |
+
def __init__(self, *args, **kwargs):
|
| 12 |
+
self.stream = kwargs.pop("stream", sys.stdout)
|
| 13 |
+
self._lock = threading.RLock()
|
| 14 |
+
super().__init__(*args, **kwargs)
|
| 15 |
+
|
| 16 |
+
def write_message(self, message):
|
| 17 |
+
msg = message.message()
|
| 18 |
+
msg_data = msg.as_bytes()
|
| 19 |
+
charset = (
|
| 20 |
+
msg.get_charset().get_output_charset() if msg.get_charset() else "utf-8"
|
| 21 |
+
)
|
| 22 |
+
msg_data = msg_data.decode(charset)
|
| 23 |
+
self.stream.write("%s\n" % msg_data)
|
| 24 |
+
self.stream.write("-" * 79)
|
| 25 |
+
self.stream.write("\n")
|
| 26 |
+
|
| 27 |
+
def send_messages(self, email_messages):
|
| 28 |
+
"""Write all messages to the stream in a thread-safe way."""
|
| 29 |
+
if not email_messages:
|
| 30 |
+
return
|
| 31 |
+
msg_count = 0
|
| 32 |
+
with self._lock:
|
| 33 |
+
try:
|
| 34 |
+
stream_created = self.open()
|
| 35 |
+
for message in email_messages:
|
| 36 |
+
self.write_message(message)
|
| 37 |
+
self.stream.flush() # flush after each message
|
| 38 |
+
msg_count += 1
|
| 39 |
+
if stream_created:
|
| 40 |
+
self.close()
|
| 41 |
+
except Exception:
|
| 42 |
+
if not self.fail_silently:
|
| 43 |
+
raise
|
| 44 |
+
return msg_count
|
testbed/django__django/django/core/mail/backends/dummy.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dummy email backend that does nothing.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from django.core.mail.backends.base import BaseEmailBackend
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class EmailBackend(BaseEmailBackend):
    """Email backend that silently discards every message."""

    def send_messages(self, email_messages):
        # Consume the iterable and report how many messages were "sent".
        return sum(1 for _ in email_messages)
|
testbed/django__django/django/core/mail/backends/filebased.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Email backend that writes messages to a file."""
|
| 2 |
+
|
| 3 |
+
import datetime
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from django.conf import settings
|
| 7 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 8 |
+
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class EmailBackend(ConsoleEmailBackend):
    """Email backend that appends each batch of messages to a log file.

    Reuses the console backend's lock-protected send loop, but replaces the
    output stream with a file opened in binary append mode by ``open()``.
    """

    def __init__(self, *args, file_path=None, **kwargs):
        # Lazily-computed log file name; see _get_filename().
        self._fname = None
        if file_path is not None:
            self.file_path = file_path
        else:
            self.file_path = getattr(settings, "EMAIL_FILE_PATH", None)
        # NOTE(review): if neither file_path nor EMAIL_FILE_PATH is set,
        # os.path.abspath(None) raises TypeError here -- presumably one of
        # them is required to be configured; confirm against callers.
        self.file_path = os.path.abspath(self.file_path)
        try:
            os.makedirs(self.file_path, exist_ok=True)
        except FileExistsError:
            # exist_ok=True still raises FileExistsError when the path exists
            # but is not a directory.
            raise ImproperlyConfigured(
                "Path for saving email messages exists, but is not a directory: %s"
                % self.file_path
            )
        except OSError as err:
            raise ImproperlyConfigured(
                "Could not create directory for saving email messages: %s (%s)"
                % (self.file_path, err)
            )
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured(
                "Could not write to directory: %s" % self.file_path
            )
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs["stream"] = None
        super().__init__(*args, **kwargs)

    def write_message(self, message):
        # Unlike the console backend, write raw bytes: open() opens the
        # stream in binary ("ab") mode.
        self.stream.write(message.message().as_bytes() + b"\n")
        self.stream.write(b"-" * 79)
        self.stream.write(b"\n")

    def _get_filename(self):
        """Return a unique file name."""
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            # id(self) disambiguates backends created within the same second.
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname

    def open(self):
        """Open the log file for appending; return True if newly opened."""
        if self.stream is None:
            self.stream = open(self._get_filename(), "ab")
            return True
        return False

    def close(self):
        """Close the log file (if open) and always reset the stream."""
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
|
testbed/django__django/django/core/mail/backends/locmem.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backend for test environment.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from django.core import mail
|
| 6 |
+
from django.core.mail.backends.base import BaseEmailBackend
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class EmailBackend(BaseEmailBackend):
    """
    An email backend for use during test sessions.

    The test connection stores email messages in a dummy outbox,
    rather than sending them out on the wire.

    The dummy outbox is accessible through the outbox instance attribute.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Create the shared outbox on first use; subsequent backends reuse it.
        if not hasattr(mail, "outbox"):
            mail.outbox = []

    def send_messages(self, messages):
        """Redirect messages to the dummy outbox"""
        delivered = 0
        for message in messages:
            # Calling .message() triggers header validation, mirroring what a
            # real backend would do before sending.
            message.message()
            mail.outbox.append(message)
            delivered += 1
        return delivered
|
testbed/django__django/django/core/mail/utils.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Email message and email sending related helper functions.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import socket
|
| 6 |
+
|
| 7 |
+
from django.utils.encoding import punycode
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
|
| 11 |
+
# seconds, which slows down the restart of the server.
|
| 12 |
+
class CachedDnsName:
    """Lazily resolve and memoize this machine's fully qualified domain name.

    socket.getfqdn() can take a couple of seconds, which would slow down
    server restarts if resolved eagerly.
    """

    def __str__(self):
        return self.get_fqdn()

    def get_fqdn(self):
        # EAFP: resolve on first access, then serve the cached value.
        try:
            return self._fqdn
        except AttributeError:
            self._fqdn = punycode(socket.getfqdn())
        return self._fqdn


DNS_NAME = CachedDnsName()
|
testbed/django__django/django/core/management/__init__.py
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
import pkgutil
|
| 4 |
+
import sys
|
| 5 |
+
from argparse import (
|
| 6 |
+
_AppendConstAction,
|
| 7 |
+
_CountAction,
|
| 8 |
+
_StoreConstAction,
|
| 9 |
+
_SubParsersAction,
|
| 10 |
+
)
|
| 11 |
+
from collections import defaultdict
|
| 12 |
+
from difflib import get_close_matches
|
| 13 |
+
from importlib import import_module
|
| 14 |
+
|
| 15 |
+
import django
|
| 16 |
+
from django.apps import apps
|
| 17 |
+
from django.conf import settings
|
| 18 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 19 |
+
from django.core.management.base import (
|
| 20 |
+
BaseCommand,
|
| 21 |
+
CommandError,
|
| 22 |
+
CommandParser,
|
| 23 |
+
handle_default_options,
|
| 24 |
+
)
|
| 25 |
+
from django.core.management.color import color_style
|
| 26 |
+
from django.utils import autoreload
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def find_commands(management_dir):
    """
    Given a path to a management directory, return a list of all the command
    names that are available.
    """
    commands_path = os.path.join(management_dir, "commands")
    found = []
    for _finder, module_name, is_pkg in pkgutil.iter_modules([commands_path]):
        # Packages and underscore-prefixed modules are not commands.
        if is_pkg or module_name.startswith("_"):
            continue
        found.append(module_name)
    return found
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def load_command_class(app_name, name):
    """
    Given a command name and an application name, return the Command
    class instance. Allow all errors raised by the import process
    (ImportError, AttributeError) to propagate.
    """
    module_path = "%s.management.commands.%s" % (app_name, name)
    return import_module(module_path).Command()
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@functools.cache
def get_commands():
    """
    Return a dictionary mapping command names to their callback applications.

    Look for a management.commands package in django.core, and in each
    installed application -- if a commands package exists, register all
    commands in that package.

    Core commands are always included. If a settings module has been
    specified, also include user-defined commands.

    The dictionary is in the format {command_name: app_name}. Key-value
    pairs from this dictionary can then be used in calls to
    load_command_class(app_name, command_name)

    The dictionary is cached on the first call (via functools.cache) and
    reused on subsequent calls.
    """
    # Django's built-in commands are always available.
    commands = {}
    for command_name in find_commands(__path__[0]):
        commands[command_name] = "django.core"

    # Without configured settings there are no installed apps to scan.
    if not settings.configured:
        return commands

    # Iterate in reverse so apps listed earlier in INSTALLED_APPS are
    # processed last and therefore override same-named commands.
    for app_config in reversed(apps.get_app_configs()):
        management_path = os.path.join(app_config.path, "management")
        for command_name in find_commands(management_path):
            commands[command_name] = app_config.name

    return commands
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def call_command(command_name, *args, **options):
    """
    Call the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    `command_name` may be a string or a command object. Using a string is
    preferred unless the command object is required for further processing or
    testing.

    Some examples:
        call_command('migrate')
        call_command('shell', plain=True)
        call_command('sqlmigrate', 'myapp')

        from django.core.management.commands import flush
        cmd = flush.Command()
        call_command(cmd, verbosity=0, interactive=False)
        # Do something with cmd ...
    """
    if isinstance(command_name, BaseCommand):
        # Command object passed in.
        command = command_name
        command_name = command.__class__.__module__.split(".")[-1]
    else:
        # Load the command object by name.
        try:
            app_name = get_commands()[command_name]
        except KeyError:
            raise CommandError("Unknown command: %r" % command_name)

        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            command = app_name
        else:
            command = load_command_class(app_name, command_name)

    # Simulate argument parsing to get the option defaults (see #10080 for details).
    parser = command.create_parser("", command_name)
    # Use the `dest` option name from the parser option
    # Maps e.g. "no-color" (longest option string, minus dashes) -> "no_color".
    opt_mapping = {
        min(s_opt.option_strings).lstrip("-").replace("-", "_"): s_opt.dest
        for s_opt in parser._actions
        if s_opt.option_strings
    }
    # Normalize caller-supplied keyword names to their argparse dests.
    arg_options = {opt_mapping.get(key, key): value for key, value in options.items()}
    parse_args = []
    for arg in args:
        if isinstance(arg, (list, tuple)):
            parse_args += map(str, arg)
        else:
            parse_args.append(str(arg))

    def get_actions(parser):
        # Parser actions and actions from sub-parser choices.
        for opt in parser._actions:
            if isinstance(opt, _SubParsersAction):
                for sub_opt in opt.choices.values():
                    yield from get_actions(sub_opt)
            else:
                yield opt

    parser_actions = list(get_actions(parser))
    mutually_exclusive_required_options = {
        opt
        for group in parser._mutually_exclusive_groups
        for opt in group._group_actions
        if group.required
    }
    # Any required arguments which are passed in via **options must be passed
    # to parse_args().
    for opt in parser_actions:
        if opt.dest in options and (
            opt.required or opt in mutually_exclusive_required_options
        ):
            opt_dest_count = sum(v == opt.dest for v in opt_mapping.values())
            if opt_dest_count > 1:
                raise TypeError(
                    f"Cannot pass the dest {opt.dest!r} that matches multiple "
                    f"arguments via **options."
                )
            parse_args.append(min(opt.option_strings))
            # const-style actions take no value on the command line.
            if isinstance(opt, (_AppendConstAction, _CountAction, _StoreConstAction)):
                continue
            value = arg_options[opt.dest]
            if isinstance(value, (list, tuple)):
                parse_args += map(str, value)
            else:
                parse_args.append(str(value))
    defaults = parser.parse_args(args=parse_args)
    # Caller-supplied keyword values override parsed defaults.
    defaults = dict(defaults._get_kwargs(), **arg_options)
    # Raise an error if any unknown options were passed.
    stealth_options = set(command.base_stealth_options + command.stealth_options)
    dest_parameters = {action.dest for action in parser_actions}
    valid_options = (dest_parameters | stealth_options).union(opt_mapping)
    unknown_options = set(options) - valid_options
    if unknown_options:
        raise TypeError(
            "Unknown option(s) for %s command: %s. "
            "Valid options are: %s."
            % (
                command_name,
                ", ".join(sorted(unknown_options)),
                ", ".join(sorted(valid_options)),
            )
        )
    # Move positional args out of options to mimic legacy optparse
    args = defaults.pop("args", ())
    # Unlike command-line invocation, programmatic calls skip system checks
    # by default unless the caller asks otherwise.
    if "skip_checks" not in options:
        defaults["skip_checks"] = True

    return command.execute(*args, **defaults)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class ManagementUtility:
    """
    Encapsulate the logic of the django-admin and manage.py utilities.
    """

    def __init__(self, argv=None):
        # Copy sys.argv so later mutations (e.g. in execute()) don't leak.
        self.argv = argv or sys.argv[:]
        self.prog_name = os.path.basename(self.argv[0])
        if self.prog_name == "__main__.py":
            self.prog_name = "python -m django"
        # Set by execute() when settings fail to import/configure.
        self.settings_exception = None

    def main_help_text(self, commands_only=False):
        """Return the script's main help text, as a string."""
        if commands_only:
            usage = sorted(get_commands())
        else:
            usage = [
                "",
                "Type '%s help <subcommand>' for help on a specific subcommand."
                % self.prog_name,
                "",
                "Available subcommands:",
            ]
            # Group command names by the app that provides them.
            commands_dict = defaultdict(lambda: [])
            for name, app in get_commands().items():
                if app == "django.core":
                    app = "django"
                else:
                    app = app.rpartition(".")[-1]
                commands_dict[app].append(name)
            style = color_style()
            for app in sorted(commands_dict):
                usage.append("")
                usage.append(style.NOTICE("[%s]" % app))
                for name in sorted(commands_dict[app]):
                    usage.append("    %s" % name)
            # Output an extra note if settings are not properly configured
            if self.settings_exception is not None:
                usage.append(
                    style.NOTICE(
                        "Note that only Django core commands are listed "
                        "as settings are not properly configured (error: %s)."
                        % self.settings_exception
                    )
                )

        return "\n".join(usage)

    def fetch_command(self, subcommand):
        """
        Try to fetch the given subcommand, printing a message with the
        appropriate command called from the command line (usually
        "django-admin" or "manage.py") if it can't be found.
        """
        # Get commands outside of try block to prevent swallowing exceptions
        commands = get_commands()
        try:
            app_name = commands[subcommand]
        except KeyError:
            if os.environ.get("DJANGO_SETTINGS_MODULE"):
                # If `subcommand` is missing due to misconfigured settings, the
                # following line will retrigger an ImproperlyConfigured exception
                # (get_commands() swallows the original one) so the user is
                # informed about it.
                settings.INSTALLED_APPS
            elif not settings.configured:
                sys.stderr.write("No Django settings specified.\n")
            possible_matches = get_close_matches(subcommand, commands)
            sys.stderr.write("Unknown command: %r" % subcommand)
            if possible_matches:
                sys.stderr.write(". Did you mean %s?" % possible_matches[0])
            sys.stderr.write("\nType '%s help' for usage.\n" % self.prog_name)
            sys.exit(1)
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            klass = load_command_class(app_name, subcommand)
        return klass

    def autocomplete(self):
        """
        Output completion suggestions for BASH.

        The output of this function is passed to BASH's `COMPREPLY` variable
        and treated as completion suggestions. `COMPREPLY` expects a space
        separated string as the result.

        The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
        to get information about the cli input. Please refer to the BASH
        man-page for more information about this variables.

        Subcommand options are saved as pairs. A pair consists of
        the long option string (e.g. '--exclude') and a boolean
        value indicating if the option requires arguments. When printing to
        stdout, an equal sign is appended to options which require arguments.

        Note: If debugging this function, it is recommended to write the debug
        output in a separate file. Otherwise the debug output will be treated
        and formatted as potential completion suggestions.
        """
        # Don't complete if user hasn't sourced bash_completion file.
        if "DJANGO_AUTO_COMPLETE" not in os.environ:
            return

        cwords = os.environ["COMP_WORDS"].split()[1:]
        cword = int(os.environ["COMP_CWORD"])

        try:
            curr = cwords[cword - 1]
        except IndexError:
            curr = ""

        subcommands = [*get_commands(), "help"]
        options = [("--help", False)]

        # subcommand
        if cword == 1:
            print(" ".join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
        # subcommand options
        # special case: the 'help' subcommand has no options
        elif cwords[0] in subcommands and cwords[0] != "help":
            subcommand_cls = self.fetch_command(cwords[0])
            # special case: add the names of installed apps to options
            if cwords[0] in ("dumpdata", "sqlmigrate", "sqlsequencereset", "test"):
                try:
                    app_configs = apps.get_app_configs()
                    # Get the last part of the dotted path as the app name.
                    options.extend((app_config.label, 0) for app_config in app_configs)
                except ImportError:
                    # Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
                    # user will find out once they execute the command.
                    pass
            parser = subcommand_cls.create_parser("", cwords[0])
            options.extend(
                (min(s_opt.option_strings), s_opt.nargs != 0)
                for s_opt in parser._actions
                if s_opt.option_strings
            )
            # filter out previously specified options from available options
            prev_opts = {x.split("=")[0] for x in cwords[1 : cword - 1]}
            options = (opt for opt in options if opt[0] not in prev_opts)

            # filter options by current input
            options = sorted((k, v) for k, v in options if k.startswith(curr))
            for opt_label, require_arg in options:
                # append '=' to options which require args
                if require_arg:
                    opt_label += "="
                print(opt_label)
        # Exit code of the bash completion function is never passed back to
        # the user, so it's safe to always exit with 0.
        # For more details see #25420.
        sys.exit(0)

    def execute(self):
        """
        Given the command-line arguments, figure out which subcommand is being
        run, create a parser appropriate to that command, and run it.
        """
        try:
            subcommand = self.argv[1]
        except IndexError:
            subcommand = "help"  # Display help if no arguments were given.

        # Preprocess options to extract --settings and --pythonpath.
        # These options could affect the commands that are available, so they
        # must be processed early.
        parser = CommandParser(
            prog=self.prog_name,
            usage="%(prog)s subcommand [options] [args]",
            add_help=False,
            allow_abbrev=False,
        )
        parser.add_argument("--settings")
        parser.add_argument("--pythonpath")
        parser.add_argument("args", nargs="*")  # catch-all
        try:
            options, args = parser.parse_known_args(self.argv[2:])
            handle_default_options(options)
        except CommandError:
            pass  # Ignore any option errors at this point.

        # Probe settings; remember any failure for main_help_text().
        try:
            settings.INSTALLED_APPS
        except ImproperlyConfigured as exc:
            self.settings_exception = exc
        except ImportError as exc:
            self.settings_exception = exc

        if settings.configured:
            # Start the auto-reloading dev server even if the code is broken.
            # The hardcoded condition is a code smell but we can't rely on a
            # flag on the command class because we haven't located it yet.
            if subcommand == "runserver" and "--noreload" not in self.argv:
                try:
                    autoreload.check_errors(django.setup)()
                except Exception:
                    # The exception will be raised later in the child process
                    # started by the autoreloader. Pretend it didn't happen by
                    # loading an empty list of applications.
                    apps.all_models = defaultdict(dict)
                    apps.app_configs = {}
                    apps.apps_ready = apps.models_ready = apps.ready = True

                    # Remove options not compatible with the built-in runserver
                    # (e.g. options for the contrib.staticfiles' runserver).
                    # Changes here require manually testing as described in
                    # #27522.
                    _parser = self.fetch_command("runserver").create_parser(
                        "django", "runserver"
                    )
                    _options, _args = _parser.parse_known_args(self.argv[2:])
                    for _arg in _args:
                        self.argv.remove(_arg)

            # In all other cases, django.setup() is required to succeed.
            else:
                django.setup()

        self.autocomplete()

        if subcommand == "help":
            if "--commands" in args:
                sys.stdout.write(self.main_help_text(commands_only=True) + "\n")
            elif not options.args:
                sys.stdout.write(self.main_help_text() + "\n")
            else:
                self.fetch_command(options.args[0]).print_help(
                    self.prog_name, options.args[0]
                )
        # Special-cases: We want 'django-admin --version' and
        # 'django-admin --help' to work, for backwards compatibility.
        elif subcommand == "version" or self.argv[1:] == ["--version"]:
            sys.stdout.write(django.get_version() + "\n")
        elif self.argv[1:] in (["--help"], ["-h"]):
            sys.stdout.write(self.main_help_text() + "\n")
        else:
            self.fetch_command(subcommand).run_from_argv(self.argv)
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def execute_from_command_line(argv=None):
    """Run a ManagementUtility."""
    ManagementUtility(argv).execute()
|
testbed/django__django/django/core/management/base.py
ADDED
|
@@ -0,0 +1,688 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Base classes for writing management commands (named commands which can
|
| 3 |
+
be executed through ``django-admin`` or ``manage.py``).
|
| 4 |
+
"""
|
| 5 |
+
import argparse
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
from argparse import ArgumentParser, HelpFormatter
|
| 9 |
+
from functools import partial
|
| 10 |
+
from io import TextIOBase
|
| 11 |
+
|
| 12 |
+
import django
|
| 13 |
+
from django.core import checks
|
| 14 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 15 |
+
from django.core.management.color import color_style, no_style
|
| 16 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 17 |
+
|
| 18 |
+
ALL_CHECKS = "__all__"
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class CommandError(Exception):
    """
    Exception class indicating a problem while executing a management
    command.

    Raising this during command execution is the preferred way to signal
    failure: the runner catches it and prints a clean, nicely-formatted
    error message to stderr (instead of a traceback), then exits the
    process with ``returncode``.
    """

    def __init__(self, *args, returncode=1, **kwargs):
        # Remember the exit status requested by the raiser; the command
        # runner hands it to sys.exit().
        self.returncode = returncode
        super().__init__(*args, **kwargs)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class SystemCheckError(CommandError):
    """
    Raised when the system check framework detected unrecoverable errors.

    Unlike a plain CommandError, the message is pre-formatted by
    BaseCommand.check(), so the runner prints it verbatim.
    """
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class CommandParser(ArgumentParser):
|
| 48 |
+
"""
|
| 49 |
+
Customized ArgumentParser class to improve some error messages and prevent
|
| 50 |
+
SystemExit in several occasions, as SystemExit is unacceptable when a
|
| 51 |
+
command is called programmatically.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def __init__(
|
| 55 |
+
self, *, missing_args_message=None, called_from_command_line=None, **kwargs
|
| 56 |
+
):
|
| 57 |
+
self.missing_args_message = missing_args_message
|
| 58 |
+
self.called_from_command_line = called_from_command_line
|
| 59 |
+
super().__init__(**kwargs)
|
| 60 |
+
|
| 61 |
+
def parse_args(self, args=None, namespace=None):
|
| 62 |
+
# Catch missing argument for a better error message
|
| 63 |
+
if self.missing_args_message and not (
|
| 64 |
+
args or any(not arg.startswith("-") for arg in args)
|
| 65 |
+
):
|
| 66 |
+
self.error(self.missing_args_message)
|
| 67 |
+
return super().parse_args(args, namespace)
|
| 68 |
+
|
| 69 |
+
def error(self, message):
|
| 70 |
+
if self.called_from_command_line:
|
| 71 |
+
super().error(message)
|
| 72 |
+
else:
|
| 73 |
+
raise CommandError("Error: %s" % message)
|
| 74 |
+
|
| 75 |
+
def add_subparsers(self, **kwargs):
|
| 76 |
+
parser_class = kwargs.get("parser_class", type(self))
|
| 77 |
+
if issubclass(parser_class, CommandParser):
|
| 78 |
+
kwargs["parser_class"] = partial(
|
| 79 |
+
parser_class,
|
| 80 |
+
called_from_command_line=self.called_from_command_line,
|
| 81 |
+
)
|
| 82 |
+
return super().add_subparsers(**kwargs)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def handle_default_options(options):
    """
    Apply the options every command accepts (``--settings`` and
    ``--pythonpath``) so that ManagementUtility can honor them before it
    even searches for the requested command.
    """
    settings_module = options.settings
    if settings_module:
        os.environ["DJANGO_SETTINGS_MODULE"] = settings_module
    extra_path = options.pythonpath
    if extra_path:
        # Prepend so the given path wins over anything already importable.
        sys.path.insert(0, extra_path)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def no_translations(handle_func):
    """Decorator that forces a command to run with translations deactivated."""

    def wrapper(*args, **kwargs):
        from django.utils import translation

        active_language = translation.get_language()
        translation.deactivate_all()
        try:
            return handle_func(*args, **kwargs)
        finally:
            # Restore whatever language was active before the command ran,
            # even if handle_func raised.
            if active_language is not None:
                translation.activate(active_language)

    return wrapper
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class DjangoHelpFormatter(HelpFormatter):
    """
    Help formatter that pushes the options common to every command to the
    bottom of --help output, so command-specific arguments appear first.
    """

    # Option strings shared by all commands; these are sorted last.
    show_last = {
        "--version",
        "--verbosity",
        "--traceback",
        "--settings",
        "--pythonpath",
        "--no-color",
        "--force-color",
        "--skip-checks",
    }

    def _reordered_actions(self, actions):
        # sorted() is stable, so keying on a boolean keeps the relative
        # order within each group while moving common options (True) last.
        def is_common(action):
            return bool(set(action.option_strings) & self.show_last)

        return sorted(actions, key=is_common)

    def add_usage(self, usage, actions, *args, **kwargs):
        super().add_usage(usage, self._reordered_actions(actions), *args, **kwargs)

    def add_arguments(self, actions):
        super().add_arguments(self._reordered_actions(actions))
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class OutputWrapper(TextIOBase):
    """
    Thin wrapper around stdout/stderr that appends a line ending to each
    message and optionally runs it through a styling function.
    """

    @property
    def style_func(self):
        return self._style_func

    @style_func.setter
    def style_func(self, style_func):
        # Only style output headed for a real terminal; otherwise fall
        # back to an identity pass-through.
        self._style_func = (
            style_func if style_func and self.isatty() else (lambda x: x)
        )

    def __init__(self, out, ending="\n"):
        self._out = out
        self.style_func = None
        self.ending = ending

    def __getattr__(self, name):
        # Delegate anything not defined here to the wrapped stream.
        return getattr(self._out, name)

    def flush(self):
        if hasattr(self._out, "flush"):
            self._out.flush()

    def isatty(self):
        stream = self._out
        return hasattr(stream, "isatty") and stream.isatty()

    def write(self, msg="", style_func=None, ending=None):
        if ending is None:
            ending = self.ending
        if ending and not msg.endswith(ending):
            msg += ending
        self._out.write((style_func or self.style_func)(msg))
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
class BaseCommand:
    """
    The base class from which all management commands ultimately
    derive.

    Use this class if you want access to all of the mechanisms which
    parse the command-line arguments and work out what code to call in
    response; if you don't need to change any of that behavior,
    consider using one of the subclasses defined in this file.

    If you are interested in overriding/customizing various aspects of
    the command-parsing and -execution behavior, the normal flow works
    as follows:

    1. ``django-admin`` or ``manage.py`` loads the command class
       and calls its ``run_from_argv()`` method.

    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
       an ``ArgumentParser`` for the arguments, parses them, performs
       any environment changes requested by options like
       ``pythonpath``, and then calls the ``execute()`` method,
       passing the parsed arguments.

    3. The ``execute()`` method attempts to carry out the command by
       calling the ``handle()`` method with the parsed arguments; any
       output produced by ``handle()`` will be printed to standard
       output and, if the command is intended to produce a block of
       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.

    4. If ``handle()`` or ``execute()`` raised any exception (e.g.
       ``CommandError``), ``run_from_argv()`` will instead print an error
       message to ``stderr``.

    Thus, the ``handle()`` method is typically the starting point for
    subclasses; many built-in commands and command types either place
    all of their logic in ``handle()``, or perform some additional
    parsing work in ``handle()`` and then delegate from it to more
    specialized methods as needed.

    Several attributes affect behavior at various steps along the way:

    ``help``
        A short description of the command, which will be printed in
        help messages.

    ``output_transaction``
        A boolean indicating whether the command outputs SQL
        statements; if ``True``, the output will automatically be
        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
        ``False``.

    ``requires_migrations_checks``
        A boolean; if ``True``, the command prints a warning if the set of
        migrations on disk don't match the migrations in the database.

    ``requires_system_checks``
        A list or tuple of tags, e.g. [Tags.staticfiles, Tags.models]. System
        checks registered in the chosen tags will be checked for errors prior
        to executing the command. The value '__all__' can be used to specify
        that all system checks should be performed. Default value is '__all__'.

        To validate an individual application's models
        rather than all applications' models, call
        ``self.check(app_configs)`` from ``handle()``, where ``app_configs``
        is the list of application's configuration provided by the
        app registry.

    ``stealth_options``
        A tuple of any options the command uses which aren't defined by the
        argument parser.
    """

    # Metadata about this command.
    help = ""

    # Configuration shortcuts that alter various logic.
    _called_from_command_line = False
    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
    requires_migrations_checks = False
    requires_system_checks = "__all__"
    # Arguments, common to all commands, which aren't defined by the argument
    # parser.
    base_stealth_options = ("stderr", "stdout")
    # Command-specific options not defined by the argument parser.
    stealth_options = ()
    # Base arguments (by option string) whose --help text should be hidden.
    suppressed_base_arguments = set()

    def __init__(self, stdout=None, stderr=None, no_color=False, force_color=False):
        # Wrap the output streams so writes get line endings and, when the
        # stream is a tty, styling applied automatically.
        self.stdout = OutputWrapper(stdout or sys.stdout)
        self.stderr = OutputWrapper(stderr or sys.stderr)
        if no_color and force_color:
            raise CommandError("'no_color' and 'force_color' can't be used together.")
        if no_color:
            self.style = no_style()
        else:
            self.style = color_style(force_color)
            # Errors written to stderr are styled with the ERROR role.
            self.stderr.style_func = self.style.ERROR
        if (
            not isinstance(self.requires_system_checks, (list, tuple))
            and self.requires_system_checks != ALL_CHECKS
        ):
            raise TypeError("requires_system_checks must be a list or tuple.")

    def get_version(self):
        """
        Return the Django version, which should be correct for all built-in
        Django commands. User-supplied commands can override this method to
        return their own version.
        """
        return django.get_version()

    def create_parser(self, prog_name, subcommand, **kwargs):
        """
        Create and return the ``ArgumentParser`` which will be used to
        parse the arguments to this command.
        """
        kwargs.setdefault("formatter_class", DjangoHelpFormatter)
        parser = CommandParser(
            prog="%s %s" % (os.path.basename(prog_name), subcommand),
            description=self.help or None,
            missing_args_message=getattr(self, "missing_args_message", None),
            called_from_command_line=getattr(self, "_called_from_command_line", None),
            **kwargs,
        )
        # Options common to every command; add_base_argument() hides the
        # help text for any listed in suppressed_base_arguments.
        self.add_base_argument(
            parser,
            "--version",
            action="version",
            version=self.get_version(),
            help="Show program's version number and exit.",
        )
        self.add_base_argument(
            parser,
            "-v",
            "--verbosity",
            default=1,
            type=int,
            choices=[0, 1, 2, 3],
            help=(
                "Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, "
                "3=very verbose output"
            ),
        )
        self.add_base_argument(
            parser,
            "--settings",
            help=(
                "The Python path to a settings module, e.g. "
                '"myproject.settings.main". If this isn\'t provided, the '
                "DJANGO_SETTINGS_MODULE environment variable will be used."
            ),
        )
        self.add_base_argument(
            parser,
            "--pythonpath",
            help=(
                "A directory to add to the Python path, e.g. "
                '"/home/djangoprojects/myproject".'
            ),
        )
        self.add_base_argument(
            parser,
            "--traceback",
            action="store_true",
            help="Raise on CommandError exceptions.",
        )
        self.add_base_argument(
            parser,
            "--no-color",
            action="store_true",
            help="Don't colorize the command output.",
        )
        self.add_base_argument(
            parser,
            "--force-color",
            action="store_true",
            help="Force colorization of the command output.",
        )
        # --skip-checks only makes sense when the command runs checks.
        if self.requires_system_checks:
            parser.add_argument(
                "--skip-checks",
                action="store_true",
                help="Skip system checks.",
            )
        self.add_arguments(parser)
        return parser

    def add_arguments(self, parser):
        """
        Entry point for subclassed commands to add custom arguments.
        """
        pass

    def add_base_argument(self, parser, *args, **kwargs):
        """
        Call the parser's add_argument() method, suppressing the help text
        according to BaseCommand.suppressed_base_arguments.
        """
        for arg in args:
            if arg in self.suppressed_base_arguments:
                kwargs["help"] = argparse.SUPPRESS
                break
        parser.add_argument(*args, **kwargs)

    def print_help(self, prog_name, subcommand):
        """
        Print the help message for this command, derived from
        ``self.usage()``.
        """
        parser = self.create_parser(prog_name, subcommand)
        parser.print_help()

    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr. If the ``--traceback`` option is present or the raised
        ``Exception`` is not ``CommandError``, raise it.
        """
        self._called_from_command_line = True
        parser = self.create_parser(argv[0], argv[1])

        options = parser.parse_args(argv[2:])
        cmd_options = vars(options)
        # Move positional args out of options to mimic legacy optparse
        args = cmd_options.pop("args", ())
        handle_default_options(options)
        try:
            self.execute(*args, **cmd_options)
        except CommandError as e:
            if options.traceback:
                raise

            # SystemCheckError takes care of its own formatting.
            if isinstance(e, SystemCheckError):
                self.stderr.write(str(e), lambda x: x)
            else:
                self.stderr.write("%s: %s" % (e.__class__.__name__, e))
            # Exit with the status carried by the CommandError.
            sys.exit(e.returncode)
        finally:
            try:
                connections.close_all()
            except ImproperlyConfigured:
                # Ignore if connections aren't setup at this point (e.g. no
                # configured settings).
                pass

    def execute(self, *args, **options):
        """
        Try to execute this command, performing system checks if needed (as
        controlled by the ``requires_system_checks`` attribute, except if
        force-skipped).
        """
        if options["force_color"] and options["no_color"]:
            raise CommandError(
                "The --no-color and --force-color options can't be used together."
            )
        # Command-line color options override whatever __init__ chose.
        if options["force_color"]:
            self.style = color_style(force_color=True)
        elif options["no_color"]:
            self.style = no_style()
            self.stderr.style_func = None
        # Stealth options: callers may redirect output streams per-call.
        if options.get("stdout"):
            self.stdout = OutputWrapper(options["stdout"])
        if options.get("stderr"):
            self.stderr = OutputWrapper(options["stderr"])

        # Run system/migration checks before handle(), unless skipped.
        if self.requires_system_checks and not options["skip_checks"]:
            if self.requires_system_checks == ALL_CHECKS:
                self.check()
            else:
                self.check(tags=self.requires_system_checks)
        if self.requires_migrations_checks:
            self.check_migrations()
        output = self.handle(*args, **options)
        if output:
            if self.output_transaction:
                # Wrap SQL output in the backend's transaction delimiters.
                connection = connections[options.get("database", DEFAULT_DB_ALIAS)]
                output = "%s\n%s\n%s" % (
                    self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()),
                    output,
                    self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()),
                )
            self.stdout.write(output)
        return output

    def check(
        self,
        app_configs=None,
        tags=None,
        display_num_errors=False,
        include_deployment_checks=False,
        fail_level=checks.ERROR,
        databases=None,
    ):
        """
        Use the system check framework to validate entire Django project.
        Raise CommandError for any serious message (error or critical errors).
        If there are only light messages (like warnings), print them to stderr
        and don't raise an exception.
        """
        all_issues = checks.run_checks(
            app_configs=app_configs,
            tags=tags,
            include_deployment_checks=include_deployment_checks,
            databases=databases,
        )

        header, body, footer = "", "", ""
        visible_issue_count = 0  # excludes silenced warnings

        if all_issues:
            # Bucket the unsilenced issues by severity level.
            debugs = [
                e for e in all_issues if e.level < checks.INFO and not e.is_silenced()
            ]
            infos = [
                e
                for e in all_issues
                if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()
            ]
            warnings = [
                e
                for e in all_issues
                if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()
            ]
            errors = [
                e
                for e in all_issues
                if checks.ERROR <= e.level < checks.CRITICAL and not e.is_silenced()
            ]
            criticals = [
                e
                for e in all_issues
                if checks.CRITICAL <= e.level and not e.is_silenced()
            ]
            sorted_issues = [
                (criticals, "CRITICALS"),
                (errors, "ERRORS"),
                (warnings, "WARNINGS"),
                (infos, "INFOS"),
                (debugs, "DEBUGS"),
            ]

            # Render each non-empty group, most serious first.
            for issues, group_name in sorted_issues:
                if issues:
                    visible_issue_count += len(issues)
                    formatted = (
                        self.style.ERROR(str(e))
                        if e.is_serious()
                        else self.style.WARNING(str(e))
                        for e in issues
                    )
                    formatted = "\n".join(sorted(formatted))
                    body += "\n%s:\n%s\n" % (group_name, formatted)

        if visible_issue_count:
            header = "System check identified some issues:\n"

        if display_num_errors:
            if visible_issue_count:
                footer += "\n"
            footer += "System check identified %s (%s silenced)." % (
                "no issues"
                if visible_issue_count == 0
                else "1 issue"
                if visible_issue_count == 1
                else "%s issues" % visible_issue_count,
                len(all_issues) - visible_issue_count,
            )

        # Any unsilenced issue at or above fail_level aborts the command.
        if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues):
            msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
            raise SystemCheckError(msg)
        else:
            msg = header + body + footer

        if msg:
            if visible_issue_count:
                self.stderr.write(msg, lambda x: x)
            else:
                self.stdout.write(msg)

    def check_migrations(self):
        """
        Print a warning if the set of migrations on disk don't match the
        migrations in the database.
        """
        from django.db.migrations.executor import MigrationExecutor

        try:
            executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
        except ImproperlyConfigured:
            # No databases are configured (or the dummy one)
            return

        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
        if plan:
            apps_waiting_migration = sorted(
                {migration.app_label for migration, backwards in plan}
            )
            self.stdout.write(
                self.style.NOTICE(
                    "\nYou have %(unapplied_migration_count)s unapplied migration(s). "
                    "Your project may not work properly until you apply the "
                    "migrations for app(s): %(apps_waiting_migration)s."
                    % {
                        "unapplied_migration_count": len(plan),
                        "apps_waiting_migration": ", ".join(apps_waiting_migration),
                    }
                )
            )
            self.stdout.write(
                self.style.NOTICE("Run 'python manage.py migrate' to apply them.")
            )

    def handle(self, *args, **options):
        """
        The actual logic of the command. Subclasses must implement
        this method.
        """
        raise NotImplementedError(
            "subclasses of BaseCommand must provide a handle() method"
        )
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
class AppCommand(BaseCommand):
    """
    Management command that operates on one or more installed application
    labels given on the command line.

    Subclasses implement ``handle_app_config()`` — called once per
    application — instead of ``handle()``.
    """

    missing_args_message = "Enter at least one application label."

    def add_arguments(self, parser):
        parser.add_argument(
            "args",
            metavar="app_label",
            nargs="+",
            help="One or more application label.",
        )

    def handle(self, *app_labels, **options):
        from django.apps import apps

        try:
            app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
        except (LookupError, ImportError) as e:
            raise CommandError(
                "%s. Are you sure your INSTALLED_APPS setting is correct?" % e
            )
        # Collect each app's output, dropping empty results, and join them.
        per_app = (
            self.handle_app_config(app_config, **options)
            for app_config in app_configs
        )
        return "\n".join(chunk for chunk in per_app if chunk)

    def handle_app_config(self, app_config, **options):
        """
        Perform the command's actions for app_config, an AppConfig instance
        corresponding to an application label given on the command line.
        """
        raise NotImplementedError(
            "Subclasses of AppCommand must provide a handle_app_config() method."
        )
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
class LabelCommand(BaseCommand):
    """
    Management command that receives one or more arbitrary string labels
    on the command line and processes each of them.

    Subclasses implement ``handle_label()`` — called once per label —
    instead of ``handle()``. If the arguments should be names of installed
    applications, use ``AppCommand`` instead.
    """

    label = "label"
    missing_args_message = "Enter at least one %s." % label

    def add_arguments(self, parser):
        parser.add_argument("args", metavar=self.label, nargs="+")

    def handle(self, *labels, **options):
        # Collect each label's output, dropping empty results, and join.
        per_label = (self.handle_label(label, **options) for label in labels)
        return "\n".join(piece for piece in per_label if piece)

    def handle_label(self, label, **options):
        """
        Perform the command's actions for ``label``, which will be the
        string as given on the command line.
        """
        raise NotImplementedError(
            "subclasses of LabelCommand must provide a handle_label() method"
        )
|
testbed/django__django/django/core/management/color.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Sets up the terminal color scheme.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import functools
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
from django.utils import termcolors
|
| 10 |
+
|
| 11 |
+
# Optional colorama support: colorama translates ANSI escape codes into
# Win32 console calls so styled output works in the legacy Windows
# terminal. Both exceptions are treated as "no colorama" (ImportError if
# the package is missing; OSError presumably from colorama.init() —
# TODO confirm which call can raise it).
try:
    import colorama

    colorama.init()
except (ImportError, OSError):
    HAS_COLORAMA = False
else:
    HAS_COLORAMA = True
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def supports_color():
    """
    Return True if the running system's terminal supports color,
    and False otherwise.
    """

    def vt_codes_enabled_in_windows_registry():
        """
        Check the Windows Registry to see if VT code handling has been
        enabled by default, see https://superuser.com/a/1300251/447564.
        """
        try:
            # winreg is only available on Windows.
            import winreg
        except ImportError:
            return False
        try:
            reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console")
            reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel")
        except FileNotFoundError:
            return False
        return reg_key_value == 1

    # isatty is not always implemented, #6223.
    if not (hasattr(sys.stdout, "isatty") and sys.stdout.isatty()):
        return False

    return (
        sys.platform != "win32"
        or HAS_COLORAMA
        or "ANSICON" in os.environ
        # Windows Terminal supports VT codes.
        or "WT_SESSION" in os.environ
        # Microsoft Visual Studio Code's built-in terminal supports colors.
        or os.environ.get("TERM_PROGRAM") == "vscode"
        or vt_codes_enabled_in_windows_registry()
    )
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class Style:
    """
    Plain namespace object; make_style() attaches one formatting callable
    per palette role as attributes on an instance.
    """
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def make_style(config_string=""):
    """
    Create a Style object from the given config_string.

    If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used.
    """
    style = Style()
    color_settings = termcolors.parse_color_setting(config_string)

    def _plain(text):
        return text

    # The nocolor palette has all available roles, so use it as the list
    # of attributes to populate from the parsed configuration.
    for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
        if color_settings:
            role_format = color_settings.get(role, {})
            setattr(style, role, termcolors.make_style(**role_format))
        else:
            # No usable color configuration: every role passes text through.
            setattr(style, role, _plain)

    # For backwards compatibility, set style for ERROR_OUTPUT == ERROR.
    style.ERROR_OUTPUT = style.ERROR

    return style
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@functools.cache
def no_style():
    """
    Return (and memoize) a Style object with no color scheme: every role
    formats text as-is.
    """
    return make_style("nocolor")
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def color_style(force_color=False):
    """
    Return a Style object from the Django color scheme.
    """
    if force_color or supports_color():
        # Palette overrides come from the DJANGO_COLORS environment variable.
        return make_style(os.environ.get("DJANGO_COLORS", ""))
    return no_style()
|
testbed/django__django/django/core/management/commands/__init__.py
ADDED
|
File without changes
|
testbed/django__django/django/core/management/commands/check.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.apps import apps
|
| 2 |
+
from django.core import checks
|
| 3 |
+
from django.core.checks.registry import registry
|
| 4 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class Command(BaseCommand):
    """Run Django's system check framework over the whole project."""

    help = "Checks the entire Django project for potential problems."

    # The command runs checks itself, so skip the automatic pre-run checks.
    requires_system_checks = []

    def add_arguments(self, parser):
        """Register positional app labels and the check-selection options."""
        parser.add_argument("args", metavar="app_label", nargs="*")
        parser.add_argument(
            "--tag",
            "-t",
            action="append",
            dest="tags",
            help="Run only checks labeled with given tag.",
        )
        parser.add_argument(
            "--list-tags",
            action="store_true",
            help="List available tags.",
        )
        parser.add_argument(
            "--deploy",
            action="store_true",
            help="Check deployment settings.",
        )
        parser.add_argument(
            "--fail-level",
            default="ERROR",
            choices=["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"],
            help=(
                "Message level that will cause the command to exit with a "
                "non-zero status. Default is ERROR."
            ),
        )
        parser.add_argument(
            "--database",
            action="append",
            dest="databases",
            help="Run database related checks against these aliases.",
        )

    def handle(self, *app_labels, **options):
        """Validate the requested tags and run the selected checks."""
        include_deployment_checks = options["deploy"]
        if options["list_tags"]:
            available = sorted(registry.tags_available(include_deployment_checks))
            self.stdout.write("\n".join(available))
            return

        # None means "check every installed app".
        app_configs = (
            [apps.get_app_config(app_label) for app_label in app_labels]
            if app_labels
            else None
        )

        tags = options["tags"]
        if tags:
            # Reject the first tag that no registered check carries.
            for tag in tags:
                if not checks.tag_exists(tag, include_deployment_checks):
                    raise CommandError(
                        'There is no system check with the "%s" tag.' % tag
                    )

        self.check(
            app_configs=app_configs,
            tags=tags,
            display_num_errors=True,
            include_deployment_checks=include_deployment_checks,
            fail_level=getattr(checks, options["fail_level"]),
            databases=options["databases"],
        )
|
testbed/django__django/django/core/management/commands/compilemessages.py
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import codecs
|
| 2 |
+
import concurrent.futures
|
| 3 |
+
import glob
|
| 4 |
+
import os
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 8 |
+
from django.core.management.utils import find_command, is_ignored_path, popen_wrapper
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def has_bom(fn):
    """Return True if the file at Path *fn* starts with a UTF-8/UTF-16 BOM."""
    known_boms = (codecs.BOM_UTF8, codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE)
    with fn.open("rb") as stream:
        head = stream.read(4)
    return head.startswith(known_boms)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def is_writable(path):
    """Return True if *path* can be opened for appending.

    Known side effect: the file's access/modification times are bumped to
    the current time when the path is writable.
    """
    try:
        with open(path, "a"):
            os.utime(path, None)
        return True
    except OSError:
        return False
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Command(BaseCommand):
    """Compile .po translation catalogs into binary .mo files with msgfmt."""

    help = "Compiles .po files to .mo files for use with builtin gettext support."

    requires_system_checks = []

    program = "msgfmt"
    program_options = ["--check-format"]

    def add_arguments(self, parser):
        """Register locale selection, fuzzy handling, and ignore options."""
        parser.add_argument(
            "--locale",
            "-l",
            action="append",
            default=[],
            help="Locale(s) to process (e.g. de_AT). Default is to process all. "
            "Can be used multiple times.",
        )
        parser.add_argument(
            "--exclude",
            "-x",
            action="append",
            default=[],
            help="Locales to exclude. Default is none. Can be used multiple times.",
        )
        parser.add_argument(
            "--use-fuzzy",
            "-f",
            dest="fuzzy",
            action="store_true",
            help="Use fuzzy translations.",
        )
        parser.add_argument(
            "--ignore",
            "-i",
            action="append",
            dest="ignore_patterns",
            default=[],
            metavar="PATTERN",
            help="Ignore directories matching this glob-style pattern. "
            "Use multiple times to ignore more.",
        )

    def handle(self, **options):
        """Locate locale directories, then compile every selected catalog."""
        requested_locales = options["locale"]
        excluded_locales = options["exclude"]
        ignore_patterns = set(options["ignore_patterns"])
        self.verbosity = options["verbosity"]
        if options["fuzzy"]:
            # Rebind (not mutate) so the class-level default stays untouched.
            self.program_options = self.program_options + ["-f"]

        if find_command(self.program) is None:
            raise CommandError(
                "Can't find %s. Make sure you have GNU gettext "
                "tools 0.15 or newer installed." % self.program
            )

        basedirs = [os.path.join("conf", "locale"), "locale"]
        if os.environ.get("DJANGO_SETTINGS_MODULE"):
            # Imported lazily so the command also works without settings.
            from django.conf import settings

            basedirs.extend(settings.LOCALE_PATHS)

        # Walk the entire tree looking for "locale" directories, pruning
        # ignored subtrees as we go.
        # NOTE(review): removing from dirnames while iterating it can skip
        # the sibling entry that follows a removed one; preserved as-is to
        # keep behavior identical.
        for dirpath, dirnames, filenames in os.walk(".", topdown=True):
            for dirname in dirnames:
                if is_ignored_path(
                    os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns
                ):
                    dirnames.remove(dirname)
                elif dirname == "locale":
                    basedirs.append(os.path.join(dirpath, dirname))

        # Keep only directories that actually exist, deduplicated by abspath.
        basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

        if not basedirs:
            raise CommandError(
                "This script should be run from the Django Git "
                "checkout or your project or app tree, or with "
                "the settings module specified."
            )

        # Every locale present under any base directory.
        all_locales = []
        for basedir in basedirs:
            locale_dirs = filter(os.path.isdir, glob.glob("%s/*" % basedir))
            all_locales.extend(map(os.path.basename, locale_dirs))

        # Explicit selection wins; exclusions are applied afterwards.
        locales = set(requested_locales or all_locales).difference(excluded_locales)

        self.has_errors = False
        for basedir in basedirs:
            if locales:
                dirs = [
                    os.path.join(basedir, locale, "LC_MESSAGES") for locale in locales
                ]
            else:
                dirs = [basedir]
            locations = []
            for locale_dir in dirs:
                for dirpath, dirnames, filenames in os.walk(locale_dir):
                    locations.extend(
                        (dirpath, name) for name in filenames if name.endswith(".po")
                    )
            if locations:
                self.compile_messages(locations)

        if self.has_errors:
            raise CommandError("compilemessages generated one or more errors.")

    def compile_messages(self, locations):
        """
        Locations is a list of tuples: [(directory, file), ...]

        Each .po file is handed to msgfmt on a thread pool; failures set
        self.has_errors rather than aborting the whole run.
        """
        with concurrent.futures.ThreadPoolExecutor() as pool:
            pending = []
            for index, (dirpath, po_name) in enumerate(locations):
                po_path = Path(dirpath) / po_name
                mo_path = po_path.with_suffix(".mo")
                # Skip catalogs whose .mo is already newer than the .po.
                try:
                    if mo_path.stat().st_mtime >= po_path.stat().st_mtime:
                        if self.verbosity > 0:
                            self.stdout.write(
                                "File “%s” is already compiled and up to date."
                                % po_path
                            )
                        continue
                except FileNotFoundError:
                    pass
                if self.verbosity > 0:
                    self.stdout.write("processing file %s in %s" % (po_name, dirpath))

                if has_bom(po_path):
                    self.stderr.write(
                        "The %s file has a BOM (Byte Order Mark). Django only "
                        "supports .po files encoded in UTF-8 and without any BOM."
                        % po_path
                    )
                    self.has_errors = True
                    continue

                # Check writability on first location
                if index == 0 and not is_writable(mo_path):
                    self.stderr.write(
                        "The po files under %s are in a seemingly not writable "
                        "location. mo files will not be updated/created." % dirpath
                    )
                    self.has_errors = True
                    return

                cmd = [self.program, *self.program_options, "-o", mo_path, po_path]
                pending.append(pool.submit(popen_wrapper, cmd))

            for task in concurrent.futures.as_completed(pending):
                output, errors, status = task.result()
                if status:
                    if self.verbosity > 0:
                        if errors:
                            self.stderr.write(
                                "Execution of %s failed: %s" % (self.program, errors)
                            )
                        else:
                            self.stderr.write("Execution of %s failed" % self.program)
                    self.has_errors = True
|
testbed/django__django/django/core/management/commands/createcachetable.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
from django.core.cache import caches
|
| 3 |
+
from django.core.cache.backends.db import BaseDatabaseCache
|
| 4 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 5 |
+
from django.db import (
|
| 6 |
+
DEFAULT_DB_ALIAS,
|
| 7 |
+
DatabaseError,
|
| 8 |
+
connections,
|
| 9 |
+
models,
|
| 10 |
+
router,
|
| 11 |
+
transaction,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class Command(BaseCommand):
    """Create the database tables used by the SQL cache backend."""

    help = "Creates the tables needed to use the SQL cache backend."

    requires_system_checks = []

    def add_arguments(self, parser):
        """Register optional table names plus --database/--dry-run flags."""
        parser.add_argument(
            "args",
            metavar="table_name",
            nargs="*",
            help=(
                "Optional table names. Otherwise, settings.CACHES is used to find "
                "cache tables."
            ),
        )
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help="Nominates a database onto which the cache tables will be "
            'installed. Defaults to the "default" database.',
        )
        parser.add_argument(
            "--dry-run",
            action="store_true",
            help="Does not create the table, just prints the SQL that would be run.",
        )

    def handle(self, *tablenames, **options):
        """Create explicitly-named tables, or one per DB cache in CACHES."""
        alias = options["database"]
        self.verbosity = options["verbosity"]
        dry_run = options["dry_run"]
        if tablenames:
            # Legacy behavior, tablename specified as argument
            for tablename in tablenames:
                self.create_table(alias, tablename, dry_run)
        else:
            for cache_alias in settings.CACHES:
                cache = caches[cache_alias]
                if isinstance(cache, BaseDatabaseCache):
                    self.create_table(alias, cache._table, dry_run)

    def create_table(self, database, tablename, dry_run):
        """Build and (unless dry_run) execute the CREATE TABLE/INDEX SQL."""
        cache = BaseDatabaseCache(tablename, {})
        if not router.allow_migrate_model(database, cache.cache_model_class):
            return
        connection = connections[database]

        if tablename in connection.introspection.table_names():
            if self.verbosity > 0:
                self.stdout.write("Cache table '%s' already exists." % tablename)
            return

        fields = (
            # "key" is a reserved word in MySQL, so use "cache_key" instead.
            models.CharField(
                name="cache_key", max_length=255, unique=True, primary_key=True
            ),
            models.TextField(name="value"),
            models.DateTimeField(name="expires", db_index=True),
        )
        column_defs = []
        index_statements = []
        quote = connection.ops.quote_name
        for field in fields:
            column_parts = [
                quote(field.name),
                field.db_type(connection=connection),
                "%sNULL" % ("NOT " if not field.null else ""),
            ]
            if field.primary_key:
                column_parts.append("PRIMARY KEY")
            elif field.unique:
                column_parts.append("UNIQUE")
            if field.db_index:
                unique = "UNIQUE " if field.unique else ""
                index_statements.append(
                    "CREATE %sINDEX %s ON %s (%s);"
                    % (
                        unique,
                        quote("%s_%s" % (tablename, field.name)),
                        quote(tablename),
                        quote(field.name),
                    )
                )
            column_defs.append(" ".join(column_parts))
        # NOTE(review): the leading whitespace in the generated column lines
        # matches this chunk as rendered; confirm the intended indent width
        # against the upstream file if exact SQL formatting matters.
        create_lines = ["CREATE TABLE %s (" % quote(tablename)]
        for i, line in enumerate(column_defs):
            create_lines.append(
                " %s%s" % (line, "," if i < len(column_defs) - 1 else "")
            )
        create_lines.append(");")

        create_sql = "\n".join(create_lines)

        if dry_run:
            self.stdout.write(create_sql)
            for statement in index_statements:
                self.stdout.write(statement)
            return

        # Roll the DDL back on failure where the backend supports it.
        with transaction.atomic(
            using=database, savepoint=connection.features.can_rollback_ddl
        ):
            with connection.cursor() as cursor:
                try:
                    cursor.execute(create_sql)
                except DatabaseError as e:
                    raise CommandError(
                        "Cache table '%s' could not be created.\nThe error was: %s."
                        % (tablename, e)
                    )
                for statement in index_statements:
                    cursor.execute(statement)

        if self.verbosity > 1:
            self.stdout.write("Cache table '%s' created." % tablename)
|
testbed/django__django/django/core/management/commands/diffsettings.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.management.base import BaseCommand
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def module_to_dict(module, omittable=lambda k: k.startswith("_") or not k.isupper()):
    """Convert a module namespace to a Python dictionary.

    Keys are the module's attribute names that *omittable* does not reject
    (by default: public, upper-case names, i.e. settings); values are the
    repr() of each attribute.
    """
    settings_dict = {}
    for name in dir(module):
        if not omittable(name):
            settings_dict[name] = repr(getattr(module, name))
    return settings_dict
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Command(BaseCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings."""

    requires_system_checks = []

    def add_arguments(self, parser):
        """Register --all, --default and --output formatting options."""
        parser.add_argument(
            "--all",
            action="store_true",
            help=(
                'Display all settings, regardless of their value. In "hash" '
                'mode, default values are prefixed by "###".'
            ),
        )
        parser.add_argument(
            "--default",
            metavar="MODULE",
            help=(
                "The settings module to compare the current settings against. Leave "
                "empty to compare against Django's default settings."
            ),
        )
        parser.add_argument(
            "--output",
            default="hash",
            choices=("hash", "unified"),
            help=(
                "Selects the output format. 'hash' mode displays each changed "
                "setting, with the settings that don't appear in the defaults "
                "followed by ###. 'unified' mode prefixes the default setting "
                "with a minus sign, followed by the changed setting prefixed "
                "with a plus sign."
            ),
        )

    def handle(self, **options):
        """Diff the active settings against the chosen baseline module."""
        from django.conf import Settings, global_settings, settings

        # Because settings are imported lazily, we need to explicitly load them.
        if not settings.configured:
            settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default = options["default"]
        default_settings = module_to_dict(
            Settings(default) if default else global_settings
        )
        formatter = {
            "hash": self.output_hash,
            "unified": self.output_unified,
        }[options["output"]]
        return "\n".join(formatter(user_settings, default_settings, **options))

    def output_hash(self, user_settings, default_settings, **options):
        """Render one "KEY = value" line per setting, postconf -n style."""
        # Inspired by Postfix's "postconf -n".
        lines = []
        for key in sorted(user_settings):
            value = user_settings[key]
            if key not in default_settings:
                lines.append("%s = %s ###" % (key, value))
            elif value != default_settings[key]:
                lines.append("%s = %s" % (key, value))
            elif options["all"]:
                lines.append("### %s = %s" % (key, value))
        return lines

    def output_unified(self, user_settings, default_settings, **options):
        """Render a colorized unified-diff-style listing of changed settings."""
        lines = []
        for key in sorted(user_settings):
            value = user_settings[key]
            if key not in default_settings:
                lines.append(self.style.SUCCESS("+ %s = %s" % (key, value)))
            elif value != default_settings[key]:
                lines.append(
                    self.style.ERROR("- %s = %s" % (key, default_settings[key]))
                )
                lines.append(self.style.SUCCESS("+ %s = %s" % (key, value)))
            elif options["all"]:
                lines.append("  %s = %s" % (key, value))
        return lines
|
testbed/django__django/django/core/management/commands/dumpdata.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gzip
|
| 2 |
+
import os
|
| 3 |
+
import warnings
|
| 4 |
+
|
| 5 |
+
from django.apps import apps
|
| 6 |
+
from django.core import serializers
|
| 7 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 8 |
+
from django.core.management.utils import parse_apps_and_model_labels
|
| 9 |
+
from django.db import DEFAULT_DB_ALIAS, router
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import bz2
|
| 13 |
+
|
| 14 |
+
has_bz2 = True
|
| 15 |
+
except ImportError:
|
| 16 |
+
has_bz2 = False
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
import lzma
|
| 20 |
+
|
| 21 |
+
has_lzma = True
|
| 22 |
+
except ImportError:
|
| 23 |
+
has_lzma = False
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ProxyModelWarning(Warning):
    """Warning emitted when a proxy model is skipped during serialization."""
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Command(BaseCommand):
    """Serialize database contents into a fixture file or stdout."""

    help = (
        "Output the contents of the database as a fixture of the given format "
        "(using each model's default manager unless --all is specified)."
    )

    def add_arguments(self, parser):
        """Register model selection, serialization, and output options."""
        parser.add_argument(
            "args",
            metavar="app_label[.ModelName]",
            nargs="*",
            help=(
                "Restricts dumped data to the specified app_label or "
                "app_label.ModelName."
            ),
        )
        parser.add_argument(
            "--format",
            default="json",
            help="Specifies the output serialization format for fixtures.",
        )
        parser.add_argument(
            "--indent",
            type=int,
            help="Specifies the indent level to use when pretty-printing output.",
        )
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help="Nominates a specific database to dump fixtures from. "
            'Defaults to the "default" database.',
        )
        parser.add_argument(
            "-e",
            "--exclude",
            action="append",
            default=[],
            help="An app_label or app_label.ModelName to exclude "
            "(use multiple --exclude to exclude multiple apps/models).",
        )
        parser.add_argument(
            "--natural-foreign",
            action="store_true",
            dest="use_natural_foreign_keys",
            help="Use natural foreign keys if they are available.",
        )
        parser.add_argument(
            "--natural-primary",
            action="store_true",
            dest="use_natural_primary_keys",
            help="Use natural primary keys if they are available.",
        )
        parser.add_argument(
            "-a",
            "--all",
            action="store_true",
            dest="use_base_manager",
            help=(
                "Use Django's base manager to dump all models stored in the database, "
                "including those that would otherwise be filtered or modified by a "
                "custom manager."
            ),
        )
        parser.add_argument(
            "--pks",
            dest="primary_keys",
            help="Only dump objects with given primary keys. Accepts a comma-separated "
            "list of keys. This option only works when you specify one model.",
        )
        parser.add_argument(
            "-o", "--output", help="Specifies file to which the output is written."
        )

    def handle(self, *app_labels, **options):
        """Resolve the requested apps/models and serialize them."""
        format = options["format"]
        indent = options["indent"]
        using = options["database"]
        excludes = options["exclude"]
        output = options["output"]
        show_traceback = options["traceback"]
        use_natural_foreign_keys = options["use_natural_foreign_keys"]
        use_natural_primary_keys = options["use_natural_primary_keys"]
        use_base_manager = options["use_base_manager"]
        pks = options["primary_keys"]

        primary_keys = [pk.strip() for pk in pks.split(",")] if pks else []

        excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)

        if not app_labels:
            # --pks is only meaningful for a single model.
            if primary_keys:
                raise CommandError("You can only use --pks option with one model")
            # None value means "all models of this app".
            app_list = dict.fromkeys(
                app_config
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None
                and app_config not in excluded_apps
            )
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = {}
            for label in app_labels:
                try:
                    app_label, model_label = label.split(".")
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    try:
                        model = app_config.get_model(model_label)
                    except LookupError:
                        raise CommandError(
                            "Unknown model: %s.%s" % (app_label, model_label)
                        )

                    app_list_value = app_list.setdefault(app_config, [])

                    # We may have previously seen an "all-models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need adding specific models to the list.
                    if app_list_value is not None and model not in app_list_value:
                        app_list_value.append(model)
                except ValueError:
                    # label had no "."; it names a whole app.
                    if primary_keys:
                        raise CommandError(
                            "You can only use --pks option with one model"
                        )
                    app_label = label
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    app_list[app_config] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass

            raise CommandError("Unknown serialization format: %s" % format)

        def get_objects(count_only=False):
            """
            Collate the objects to be serialized. If count_only is True, just
            count the number of objects to be serialized.
            """
            if use_natural_foreign_keys:
                models = serializers.sort_dependencies(
                    app_list.items(), allow_cycles=True
                )
            else:
                # There is no need to sort dependencies when natural foreign
                # keys are not used.
                models = []
                for app_config, model_list in app_list.items():
                    if model_list is None:
                        models.extend(app_config.get_models())
                    else:
                        models.extend(model_list)
            for model in models:
                if model in excluded_models:
                    continue
                if model._meta.proxy and model._meta.proxy_for_model not in models:
                    warnings.warn(
                        "%s is a proxy model and won't be serialized."
                        % model._meta.label,
                        category=ProxyModelWarning,
                    )
                if not model._meta.proxy and router.allow_migrate_model(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager

                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    if count_only:
                        yield queryset.order_by().count()
                    else:
                        yield from queryset.iterator()

        try:
            self.stdout.ending = None
            progress_output = None
            object_count = 0
            # If dumpdata is outputting to stdout, there is no way to display progress
            if output and self.stdout.isatty() and options["verbosity"] > 0:
                progress_output = self.stdout
                object_count = sum(get_objects(count_only=True))
            if output:
                file_root, file_ext = os.path.splitext(output)
                # Unsupported compression suffixes fall back to plain open()
                # with the suffix stripped; supported ones keep the full name.
                compression_formats = {
                    ".bz2": (open, {}, file_root),
                    ".gz": (gzip.open, {}, output),
                    ".lzma": (open, {}, file_root),
                    ".xz": (open, {}, file_root),
                    ".zip": (open, {}, file_root),
                }
                if has_bz2:
                    compression_formats[".bz2"] = (bz2.open, {}, output)
                if has_lzma:
                    compression_formats[".lzma"] = (
                        lzma.open,
                        {"format": lzma.FORMAT_ALONE},
                        output,
                    )
                    compression_formats[".xz"] = (lzma.open, {}, output)
                try:
                    open_method, kwargs, file_path = compression_formats[file_ext]
                except KeyError:
                    open_method, kwargs, file_path = (open, {}, output)
                if file_path != output:
                    file_name = os.path.basename(file_path)
                    warnings.warn(
                        f"Unsupported file extension ({file_ext}). "
                        f"Fixtures saved in '{file_name}'.",
                        RuntimeWarning,
                    )
                stream = open_method(file_path, "wt", **kwargs)
            else:
                stream = None
            try:
                serializers.serialize(
                    format,
                    get_objects(),
                    indent=indent,
                    use_natural_foreign_keys=use_natural_foreign_keys,
                    use_natural_primary_keys=use_natural_primary_keys,
                    stream=stream or self.stdout,
                    progress_output=progress_output,
                    object_count=object_count,
                )
            finally:
                if stream:
                    stream.close()
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
|
testbed/django__django/django/core/management/commands/flush.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from importlib import import_module
|
| 2 |
+
|
| 3 |
+
from django.apps import apps
|
| 4 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 5 |
+
from django.core.management.color import no_style
|
| 6 |
+
from django.core.management.sql import emit_post_migrate_signal, sql_flush
|
| 7 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Command(BaseCommand):
|
| 11 |
+
help = (
|
| 12 |
+
"Removes ALL DATA from the database, including data added during "
|
| 13 |
+
'migrations. Does not achieve a "fresh install" state.'
|
| 14 |
+
)
|
| 15 |
+
stealth_options = ("reset_sequences", "allow_cascade", "inhibit_post_migrate")
|
| 16 |
+
|
| 17 |
+
def add_arguments(self, parser):
|
| 18 |
+
parser.add_argument(
|
| 19 |
+
"--noinput",
|
| 20 |
+
"--no-input",
|
| 21 |
+
action="store_false",
|
| 22 |
+
dest="interactive",
|
| 23 |
+
help="Tells Django to NOT prompt the user for input of any kind.",
|
| 24 |
+
)
|
| 25 |
+
parser.add_argument(
|
| 26 |
+
"--database",
|
| 27 |
+
default=DEFAULT_DB_ALIAS,
|
| 28 |
+
help='Nominates a database to flush. Defaults to the "default" database.',
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
def handle(self, **options):
|
| 32 |
+
database = options["database"]
|
| 33 |
+
connection = connections[database]
|
| 34 |
+
verbosity = options["verbosity"]
|
| 35 |
+
interactive = options["interactive"]
|
| 36 |
+
# The following are stealth options used by Django's internals.
|
| 37 |
+
reset_sequences = options.get("reset_sequences", True)
|
| 38 |
+
allow_cascade = options.get("allow_cascade", False)
|
| 39 |
+
inhibit_post_migrate = options.get("inhibit_post_migrate", False)
|
| 40 |
+
|
| 41 |
+
self.style = no_style()
|
| 42 |
+
|
| 43 |
+
# Import the 'management' module within each installed app, to register
|
| 44 |
+
# dispatcher events.
|
| 45 |
+
for app_config in apps.get_app_configs():
|
| 46 |
+
try:
|
| 47 |
+
import_module(".management", app_config.name)
|
| 48 |
+
except ImportError:
|
| 49 |
+
pass
|
| 50 |
+
|
| 51 |
+
sql_list = sql_flush(
|
| 52 |
+
self.style,
|
| 53 |
+
connection,
|
| 54 |
+
reset_sequences=reset_sequences,
|
| 55 |
+
allow_cascade=allow_cascade,
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
if interactive:
|
| 59 |
+
confirm = input(
|
| 60 |
+
"""You have requested a flush of the database.
|
| 61 |
+
This will IRREVERSIBLY DESTROY all data currently in the "%s" database,
|
| 62 |
+
and return each table to an empty state.
|
| 63 |
+
Are you sure you want to do this?
|
| 64 |
+
|
| 65 |
+
Type 'yes' to continue, or 'no' to cancel: """
|
| 66 |
+
% connection.settings_dict["NAME"]
|
| 67 |
+
)
|
| 68 |
+
else:
|
| 69 |
+
confirm = "yes"
|
| 70 |
+
|
| 71 |
+
if confirm == "yes":
|
| 72 |
+
try:
|
| 73 |
+
connection.ops.execute_sql_flush(sql_list)
|
| 74 |
+
except Exception as exc:
|
| 75 |
+
raise CommandError(
|
| 76 |
+
"Database %s couldn't be flushed. Possible reasons:\n"
|
| 77 |
+
" * The database isn't running or isn't configured correctly.\n"
|
| 78 |
+
" * At least one of the expected database tables doesn't exist.\n"
|
| 79 |
+
" * The SQL was invalid.\n"
|
| 80 |
+
"Hint: Look at the output of 'django-admin sqlflush'. "
|
| 81 |
+
"That's the SQL this command wasn't able to run."
|
| 82 |
+
% (connection.settings_dict["NAME"],)
|
| 83 |
+
) from exc
|
| 84 |
+
|
| 85 |
+
# Empty sql_list may signify an empty database and post_migrate
|
| 86 |
+
# would then crash.
|
| 87 |
+
if sql_list and not inhibit_post_migrate:
|
| 88 |
+
# Emit the post migrate signal. This allows individual applications to
|
| 89 |
+
# respond as if the database had been migrated from scratch.
|
| 90 |
+
emit_post_migrate_signal(verbosity, interactive, database)
|
| 91 |
+
else:
|
| 92 |
+
self.stdout.write("Flush cancelled.")
|
testbed/django__django/django/core/management/commands/inspectdb.py
ADDED
|
@@ -0,0 +1,414 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import keyword
|
| 2 |
+
import re
|
| 3 |
+
|
| 4 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 5 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 6 |
+
from django.db.models.constants import LOOKUP_SEP
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Command(BaseCommand):
|
| 10 |
+
help = (
|
| 11 |
+
"Introspects the database tables in the given database and outputs a Django "
|
| 12 |
+
"model module."
|
| 13 |
+
)
|
| 14 |
+
requires_system_checks = []
|
| 15 |
+
stealth_options = ("table_name_filter",)
|
| 16 |
+
db_module = "django.db"
|
| 17 |
+
|
| 18 |
+
def add_arguments(self, parser):
|
| 19 |
+
parser.add_argument(
|
| 20 |
+
"table",
|
| 21 |
+
nargs="*",
|
| 22 |
+
type=str,
|
| 23 |
+
help="Selects what tables or views should be introspected.",
|
| 24 |
+
)
|
| 25 |
+
parser.add_argument(
|
| 26 |
+
"--database",
|
| 27 |
+
default=DEFAULT_DB_ALIAS,
|
| 28 |
+
help=(
|
| 29 |
+
'Nominates a database to introspect. Defaults to using the "default" '
|
| 30 |
+
"database."
|
| 31 |
+
),
|
| 32 |
+
)
|
| 33 |
+
parser.add_argument(
|
| 34 |
+
"--include-partitions",
|
| 35 |
+
action="store_true",
|
| 36 |
+
help="Also output models for partition tables.",
|
| 37 |
+
)
|
| 38 |
+
parser.add_argument(
|
| 39 |
+
"--include-views",
|
| 40 |
+
action="store_true",
|
| 41 |
+
help="Also output models for database views.",
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
def handle(self, **options):
|
| 45 |
+
try:
|
| 46 |
+
for line in self.handle_inspection(options):
|
| 47 |
+
self.stdout.write(line)
|
| 48 |
+
except NotImplementedError:
|
| 49 |
+
raise CommandError(
|
| 50 |
+
"Database inspection isn't supported for the currently selected "
|
| 51 |
+
"database backend."
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
def handle_inspection(self, options):
|
| 55 |
+
connection = connections[options["database"]]
|
| 56 |
+
# 'table_name_filter' is a stealth option
|
| 57 |
+
table_name_filter = options.get("table_name_filter")
|
| 58 |
+
|
| 59 |
+
with connection.cursor() as cursor:
|
| 60 |
+
yield "# This is an auto-generated Django model module."
|
| 61 |
+
yield "# You'll have to do the following manually to clean this up:"
|
| 62 |
+
yield "# * Rearrange models' order"
|
| 63 |
+
yield "# * Make sure each model has one field with primary_key=True"
|
| 64 |
+
yield (
|
| 65 |
+
"# * Make sure each ForeignKey and OneToOneField has `on_delete` set "
|
| 66 |
+
"to the desired behavior"
|
| 67 |
+
)
|
| 68 |
+
yield (
|
| 69 |
+
"# * Remove `managed = False` lines if you wish to allow "
|
| 70 |
+
"Django to create, modify, and delete the table"
|
| 71 |
+
)
|
| 72 |
+
yield (
|
| 73 |
+
"# Feel free to rename the models, but don't rename db_table values or "
|
| 74 |
+
"field names."
|
| 75 |
+
)
|
| 76 |
+
yield "from %s import models" % self.db_module
|
| 77 |
+
known_models = []
|
| 78 |
+
# Determine types of tables and/or views to be introspected.
|
| 79 |
+
types = {"t"}
|
| 80 |
+
if options["include_partitions"]:
|
| 81 |
+
types.add("p")
|
| 82 |
+
if options["include_views"]:
|
| 83 |
+
types.add("v")
|
| 84 |
+
table_info = connection.introspection.get_table_list(cursor)
|
| 85 |
+
table_info = {info.name: info for info in table_info if info.type in types}
|
| 86 |
+
|
| 87 |
+
for table_name in options["table"] or sorted(name for name in table_info):
|
| 88 |
+
if table_name_filter is not None and callable(table_name_filter):
|
| 89 |
+
if not table_name_filter(table_name):
|
| 90 |
+
continue
|
| 91 |
+
try:
|
| 92 |
+
try:
|
| 93 |
+
relations = connection.introspection.get_relations(
|
| 94 |
+
cursor, table_name
|
| 95 |
+
)
|
| 96 |
+
except NotImplementedError:
|
| 97 |
+
relations = {}
|
| 98 |
+
try:
|
| 99 |
+
constraints = connection.introspection.get_constraints(
|
| 100 |
+
cursor, table_name
|
| 101 |
+
)
|
| 102 |
+
except NotImplementedError:
|
| 103 |
+
constraints = {}
|
| 104 |
+
primary_key_columns = (
|
| 105 |
+
connection.introspection.get_primary_key_columns(
|
| 106 |
+
cursor, table_name
|
| 107 |
+
)
|
| 108 |
+
)
|
| 109 |
+
primary_key_column = (
|
| 110 |
+
primary_key_columns[0] if primary_key_columns else None
|
| 111 |
+
)
|
| 112 |
+
unique_columns = [
|
| 113 |
+
c["columns"][0]
|
| 114 |
+
for c in constraints.values()
|
| 115 |
+
if c["unique"] and len(c["columns"]) == 1
|
| 116 |
+
]
|
| 117 |
+
table_description = connection.introspection.get_table_description(
|
| 118 |
+
cursor, table_name
|
| 119 |
+
)
|
| 120 |
+
except Exception as e:
|
| 121 |
+
yield "# Unable to inspect table '%s'" % table_name
|
| 122 |
+
yield "# The error was: %s" % e
|
| 123 |
+
continue
|
| 124 |
+
|
| 125 |
+
model_name = self.normalize_table_name(table_name)
|
| 126 |
+
yield ""
|
| 127 |
+
yield ""
|
| 128 |
+
yield "class %s(models.Model):" % model_name
|
| 129 |
+
known_models.append(model_name)
|
| 130 |
+
used_column_names = [] # Holds column names used in the table so far
|
| 131 |
+
column_to_field_name = {} # Maps column names to names of model fields
|
| 132 |
+
used_relations = set() # Holds foreign relations used in the table.
|
| 133 |
+
for row in table_description:
|
| 134 |
+
comment_notes = (
|
| 135 |
+
[]
|
| 136 |
+
) # Holds Field notes, to be displayed in a Python comment.
|
| 137 |
+
extra_params = {} # Holds Field parameters such as 'db_column'.
|
| 138 |
+
column_name = row.name
|
| 139 |
+
is_relation = column_name in relations
|
| 140 |
+
|
| 141 |
+
att_name, params, notes = self.normalize_col_name(
|
| 142 |
+
column_name, used_column_names, is_relation
|
| 143 |
+
)
|
| 144 |
+
extra_params.update(params)
|
| 145 |
+
comment_notes.extend(notes)
|
| 146 |
+
|
| 147 |
+
used_column_names.append(att_name)
|
| 148 |
+
column_to_field_name[column_name] = att_name
|
| 149 |
+
|
| 150 |
+
# Add primary_key and unique, if necessary.
|
| 151 |
+
if column_name == primary_key_column:
|
| 152 |
+
extra_params["primary_key"] = True
|
| 153 |
+
if len(primary_key_columns) > 1:
|
| 154 |
+
comment_notes.append(
|
| 155 |
+
"The composite primary key (%s) found, that is not "
|
| 156 |
+
"supported. The first column is selected."
|
| 157 |
+
% ", ".join(primary_key_columns)
|
| 158 |
+
)
|
| 159 |
+
elif column_name in unique_columns:
|
| 160 |
+
extra_params["unique"] = True
|
| 161 |
+
|
| 162 |
+
if is_relation:
|
| 163 |
+
ref_db_column, ref_db_table = relations[column_name]
|
| 164 |
+
if extra_params.pop("unique", False) or extra_params.get(
|
| 165 |
+
"primary_key"
|
| 166 |
+
):
|
| 167 |
+
rel_type = "OneToOneField"
|
| 168 |
+
else:
|
| 169 |
+
rel_type = "ForeignKey"
|
| 170 |
+
ref_pk_column = (
|
| 171 |
+
connection.introspection.get_primary_key_column(
|
| 172 |
+
cursor, ref_db_table
|
| 173 |
+
)
|
| 174 |
+
)
|
| 175 |
+
if ref_pk_column and ref_pk_column != ref_db_column:
|
| 176 |
+
extra_params["to_field"] = ref_db_column
|
| 177 |
+
rel_to = (
|
| 178 |
+
"self"
|
| 179 |
+
if ref_db_table == table_name
|
| 180 |
+
else self.normalize_table_name(ref_db_table)
|
| 181 |
+
)
|
| 182 |
+
if rel_to in known_models:
|
| 183 |
+
field_type = "%s(%s" % (rel_type, rel_to)
|
| 184 |
+
else:
|
| 185 |
+
field_type = "%s('%s'" % (rel_type, rel_to)
|
| 186 |
+
if rel_to in used_relations:
|
| 187 |
+
extra_params["related_name"] = "%s_%s_set" % (
|
| 188 |
+
model_name.lower(),
|
| 189 |
+
att_name,
|
| 190 |
+
)
|
| 191 |
+
used_relations.add(rel_to)
|
| 192 |
+
else:
|
| 193 |
+
# Calling `get_field_type` to get the field type string and any
|
| 194 |
+
# additional parameters and notes.
|
| 195 |
+
field_type, field_params, field_notes = self.get_field_type(
|
| 196 |
+
connection, table_name, row
|
| 197 |
+
)
|
| 198 |
+
extra_params.update(field_params)
|
| 199 |
+
comment_notes.extend(field_notes)
|
| 200 |
+
|
| 201 |
+
field_type += "("
|
| 202 |
+
|
| 203 |
+
# Don't output 'id = meta.AutoField(primary_key=True)', because
|
| 204 |
+
# that's assumed if it doesn't exist.
|
| 205 |
+
if att_name == "id" and extra_params == {"primary_key": True}:
|
| 206 |
+
if field_type == "AutoField(":
|
| 207 |
+
continue
|
| 208 |
+
elif (
|
| 209 |
+
field_type
|
| 210 |
+
== connection.features.introspected_field_types["AutoField"]
|
| 211 |
+
+ "("
|
| 212 |
+
):
|
| 213 |
+
comment_notes.append("AutoField?")
|
| 214 |
+
|
| 215 |
+
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
|
| 216 |
+
# table description.
|
| 217 |
+
if row.null_ok: # If it's NULL...
|
| 218 |
+
extra_params["blank"] = True
|
| 219 |
+
extra_params["null"] = True
|
| 220 |
+
|
| 221 |
+
field_desc = "%s = %s%s" % (
|
| 222 |
+
att_name,
|
| 223 |
+
# Custom fields will have a dotted path
|
| 224 |
+
"" if "." in field_type else "models.",
|
| 225 |
+
field_type,
|
| 226 |
+
)
|
| 227 |
+
if field_type.startswith(("ForeignKey(", "OneToOneField(")):
|
| 228 |
+
field_desc += ", models.DO_NOTHING"
|
| 229 |
+
|
| 230 |
+
# Add comment.
|
| 231 |
+
if connection.features.supports_comments and row.comment:
|
| 232 |
+
extra_params["db_comment"] = row.comment
|
| 233 |
+
|
| 234 |
+
if extra_params:
|
| 235 |
+
if not field_desc.endswith("("):
|
| 236 |
+
field_desc += ", "
|
| 237 |
+
field_desc += ", ".join(
|
| 238 |
+
"%s=%r" % (k, v) for k, v in extra_params.items()
|
| 239 |
+
)
|
| 240 |
+
field_desc += ")"
|
| 241 |
+
if comment_notes:
|
| 242 |
+
field_desc += " # " + " ".join(comment_notes)
|
| 243 |
+
yield " %s" % field_desc
|
| 244 |
+
comment = None
|
| 245 |
+
if info := table_info.get(table_name):
|
| 246 |
+
is_view = info.type == "v"
|
| 247 |
+
is_partition = info.type == "p"
|
| 248 |
+
if connection.features.supports_comments:
|
| 249 |
+
comment = info.comment
|
| 250 |
+
else:
|
| 251 |
+
is_view = False
|
| 252 |
+
is_partition = False
|
| 253 |
+
yield from self.get_meta(
|
| 254 |
+
table_name,
|
| 255 |
+
constraints,
|
| 256 |
+
column_to_field_name,
|
| 257 |
+
is_view,
|
| 258 |
+
is_partition,
|
| 259 |
+
comment,
|
| 260 |
+
)
|
| 261 |
+
|
| 262 |
+
def normalize_col_name(self, col_name, used_column_names, is_relation):
|
| 263 |
+
"""
|
| 264 |
+
Modify the column name to make it Python-compatible as a field name
|
| 265 |
+
"""
|
| 266 |
+
field_params = {}
|
| 267 |
+
field_notes = []
|
| 268 |
+
|
| 269 |
+
new_name = col_name.lower()
|
| 270 |
+
if new_name != col_name:
|
| 271 |
+
field_notes.append("Field name made lowercase.")
|
| 272 |
+
|
| 273 |
+
if is_relation:
|
| 274 |
+
if new_name.endswith("_id"):
|
| 275 |
+
new_name = new_name.removesuffix("_id")
|
| 276 |
+
else:
|
| 277 |
+
field_params["db_column"] = col_name
|
| 278 |
+
|
| 279 |
+
new_name, num_repl = re.subn(r"\W", "_", new_name)
|
| 280 |
+
if num_repl > 0:
|
| 281 |
+
field_notes.append("Field renamed to remove unsuitable characters.")
|
| 282 |
+
|
| 283 |
+
if new_name.find(LOOKUP_SEP) >= 0:
|
| 284 |
+
while new_name.find(LOOKUP_SEP) >= 0:
|
| 285 |
+
new_name = new_name.replace(LOOKUP_SEP, "_")
|
| 286 |
+
if col_name.lower().find(LOOKUP_SEP) >= 0:
|
| 287 |
+
# Only add the comment if the double underscore was in the original name
|
| 288 |
+
field_notes.append(
|
| 289 |
+
"Field renamed because it contained more than one '_' in a row."
|
| 290 |
+
)
|
| 291 |
+
|
| 292 |
+
if new_name.startswith("_"):
|
| 293 |
+
new_name = "field%s" % new_name
|
| 294 |
+
field_notes.append("Field renamed because it started with '_'.")
|
| 295 |
+
|
| 296 |
+
if new_name.endswith("_"):
|
| 297 |
+
new_name = "%sfield" % new_name
|
| 298 |
+
field_notes.append("Field renamed because it ended with '_'.")
|
| 299 |
+
|
| 300 |
+
if keyword.iskeyword(new_name):
|
| 301 |
+
new_name += "_field"
|
| 302 |
+
field_notes.append("Field renamed because it was a Python reserved word.")
|
| 303 |
+
|
| 304 |
+
if new_name[0].isdigit():
|
| 305 |
+
new_name = "number_%s" % new_name
|
| 306 |
+
field_notes.append(
|
| 307 |
+
"Field renamed because it wasn't a valid Python identifier."
|
| 308 |
+
)
|
| 309 |
+
|
| 310 |
+
if new_name in used_column_names:
|
| 311 |
+
num = 0
|
| 312 |
+
while "%s_%d" % (new_name, num) in used_column_names:
|
| 313 |
+
num += 1
|
| 314 |
+
new_name = "%s_%d" % (new_name, num)
|
| 315 |
+
field_notes.append("Field renamed because of name conflict.")
|
| 316 |
+
|
| 317 |
+
if col_name != new_name and field_notes:
|
| 318 |
+
field_params["db_column"] = col_name
|
| 319 |
+
|
| 320 |
+
return new_name, field_params, field_notes
|
| 321 |
+
|
| 322 |
+
def normalize_table_name(self, table_name):
|
| 323 |
+
"""Translate the table name to a Python-compatible model name."""
|
| 324 |
+
return re.sub(r"[^a-zA-Z0-9]", "", table_name.title())
|
| 325 |
+
|
| 326 |
+
def get_field_type(self, connection, table_name, row):
|
| 327 |
+
"""
|
| 328 |
+
Given the database connection, the table name, and the cursor row
|
| 329 |
+
description, this routine will return the given field type name, as
|
| 330 |
+
well as any additional keyword parameters and notes for the field.
|
| 331 |
+
"""
|
| 332 |
+
field_params = {}
|
| 333 |
+
field_notes = []
|
| 334 |
+
|
| 335 |
+
try:
|
| 336 |
+
field_type = connection.introspection.get_field_type(row.type_code, row)
|
| 337 |
+
except KeyError:
|
| 338 |
+
field_type = "TextField"
|
| 339 |
+
field_notes.append("This field type is a guess.")
|
| 340 |
+
|
| 341 |
+
# Add max_length for all CharFields.
|
| 342 |
+
if field_type == "CharField" and row.display_size:
|
| 343 |
+
if (size := int(row.display_size)) and size > 0:
|
| 344 |
+
field_params["max_length"] = size
|
| 345 |
+
|
| 346 |
+
if field_type in {"CharField", "TextField"} and row.collation:
|
| 347 |
+
field_params["db_collation"] = row.collation
|
| 348 |
+
|
| 349 |
+
if field_type == "DecimalField":
|
| 350 |
+
if row.precision is None or row.scale is None:
|
| 351 |
+
field_notes.append(
|
| 352 |
+
"max_digits and decimal_places have been guessed, as this "
|
| 353 |
+
"database handles decimal fields as float"
|
| 354 |
+
)
|
| 355 |
+
field_params["max_digits"] = (
|
| 356 |
+
row.precision if row.precision is not None else 10
|
| 357 |
+
)
|
| 358 |
+
field_params["decimal_places"] = (
|
| 359 |
+
row.scale if row.scale is not None else 5
|
| 360 |
+
)
|
| 361 |
+
else:
|
| 362 |
+
field_params["max_digits"] = row.precision
|
| 363 |
+
field_params["decimal_places"] = row.scale
|
| 364 |
+
|
| 365 |
+
return field_type, field_params, field_notes
|
| 366 |
+
|
| 367 |
+
def get_meta(
|
| 368 |
+
self,
|
| 369 |
+
table_name,
|
| 370 |
+
constraints,
|
| 371 |
+
column_to_field_name,
|
| 372 |
+
is_view,
|
| 373 |
+
is_partition,
|
| 374 |
+
comment,
|
| 375 |
+
):
|
| 376 |
+
"""
|
| 377 |
+
Return a sequence comprising the lines of code necessary
|
| 378 |
+
to construct the inner Meta class for the model corresponding
|
| 379 |
+
to the given database table name.
|
| 380 |
+
"""
|
| 381 |
+
unique_together = []
|
| 382 |
+
has_unsupported_constraint = False
|
| 383 |
+
for params in constraints.values():
|
| 384 |
+
if params["unique"]:
|
| 385 |
+
columns = params["columns"]
|
| 386 |
+
if None in columns:
|
| 387 |
+
has_unsupported_constraint = True
|
| 388 |
+
columns = [
|
| 389 |
+
x for x in columns if x is not None and x in column_to_field_name
|
| 390 |
+
]
|
| 391 |
+
if len(columns) > 1:
|
| 392 |
+
unique_together.append(
|
| 393 |
+
str(tuple(column_to_field_name[c] for c in columns))
|
| 394 |
+
)
|
| 395 |
+
if is_view:
|
| 396 |
+
managed_comment = " # Created from a view. Don't remove."
|
| 397 |
+
elif is_partition:
|
| 398 |
+
managed_comment = " # Created from a partition. Don't remove."
|
| 399 |
+
else:
|
| 400 |
+
managed_comment = ""
|
| 401 |
+
meta = [""]
|
| 402 |
+
if has_unsupported_constraint:
|
| 403 |
+
meta.append(" # A unique constraint could not be introspected.")
|
| 404 |
+
meta += [
|
| 405 |
+
" class Meta:",
|
| 406 |
+
" managed = False%s" % managed_comment,
|
| 407 |
+
" db_table = %r" % table_name,
|
| 408 |
+
]
|
| 409 |
+
if unique_together:
|
| 410 |
+
tup = "(" + ", ".join(unique_together) + ",)"
|
| 411 |
+
meta += [" unique_together = %s" % tup]
|
| 412 |
+
if comment:
|
| 413 |
+
meta += [f" db_table_comment = {comment!r}"]
|
| 414 |
+
return meta
|
testbed/django__django/django/core/management/commands/loaddata.py
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import glob
|
| 3 |
+
import gzip
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
import zipfile
|
| 8 |
+
from itertools import product
|
| 9 |
+
|
| 10 |
+
from django.apps import apps
|
| 11 |
+
from django.conf import settings
|
| 12 |
+
from django.core import serializers
|
| 13 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 14 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 15 |
+
from django.core.management.color import no_style
|
| 16 |
+
from django.core.management.utils import parse_apps_and_model_labels
|
| 17 |
+
from django.db import (
|
| 18 |
+
DEFAULT_DB_ALIAS,
|
| 19 |
+
DatabaseError,
|
| 20 |
+
IntegrityError,
|
| 21 |
+
connections,
|
| 22 |
+
router,
|
| 23 |
+
transaction,
|
| 24 |
+
)
|
| 25 |
+
from django.utils.functional import cached_property
|
| 26 |
+
|
| 27 |
+
try:
|
| 28 |
+
import bz2
|
| 29 |
+
|
| 30 |
+
has_bz2 = True
|
| 31 |
+
except ImportError:
|
| 32 |
+
has_bz2 = False
|
| 33 |
+
|
| 34 |
+
try:
|
| 35 |
+
import lzma
|
| 36 |
+
|
| 37 |
+
has_lzma = True
|
| 38 |
+
except ImportError:
|
| 39 |
+
has_lzma = False
|
| 40 |
+
|
| 41 |
+
READ_STDIN = "-"
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class Command(BaseCommand):
|
| 45 |
+
help = "Installs the named fixture(s) in the database."
|
| 46 |
+
missing_args_message = (
|
| 47 |
+
"No database fixture specified. Please provide the path of at least "
|
| 48 |
+
"one fixture in the command line."
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
def add_arguments(self, parser):
|
| 52 |
+
parser.add_argument(
|
| 53 |
+
"args", metavar="fixture", nargs="+", help="Fixture labels."
|
| 54 |
+
)
|
| 55 |
+
parser.add_argument(
|
| 56 |
+
"--database",
|
| 57 |
+
default=DEFAULT_DB_ALIAS,
|
| 58 |
+
help=(
|
| 59 |
+
"Nominates a specific database to load fixtures into. Defaults to the "
|
| 60 |
+
'"default" database.'
|
| 61 |
+
),
|
| 62 |
+
)
|
| 63 |
+
parser.add_argument(
|
| 64 |
+
"--app",
|
| 65 |
+
dest="app_label",
|
| 66 |
+
help="Only look for fixtures in the specified app.",
|
| 67 |
+
)
|
| 68 |
+
parser.add_argument(
|
| 69 |
+
"--ignorenonexistent",
|
| 70 |
+
"-i",
|
| 71 |
+
action="store_true",
|
| 72 |
+
dest="ignore",
|
| 73 |
+
help="Ignores entries in the serialized data for fields that do not "
|
| 74 |
+
"currently exist on the model.",
|
| 75 |
+
)
|
| 76 |
+
parser.add_argument(
|
| 77 |
+
"-e",
|
| 78 |
+
"--exclude",
|
| 79 |
+
action="append",
|
| 80 |
+
default=[],
|
| 81 |
+
help=(
|
| 82 |
+
"An app_label or app_label.ModelName to exclude. Can be used multiple "
|
| 83 |
+
"times."
|
| 84 |
+
),
|
| 85 |
+
)
|
| 86 |
+
parser.add_argument(
|
| 87 |
+
"--format",
|
| 88 |
+
help="Format of serialized data when reading from stdin.",
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
def handle(self, *fixture_labels, **options):
|
| 92 |
+
self.ignore = options["ignore"]
|
| 93 |
+
self.using = options["database"]
|
| 94 |
+
self.app_label = options["app_label"]
|
| 95 |
+
self.verbosity = options["verbosity"]
|
| 96 |
+
self.excluded_models, self.excluded_apps = parse_apps_and_model_labels(
|
| 97 |
+
options["exclude"]
|
| 98 |
+
)
|
| 99 |
+
self.format = options["format"]
|
| 100 |
+
|
| 101 |
+
with transaction.atomic(using=self.using):
|
| 102 |
+
self.loaddata(fixture_labels)
|
| 103 |
+
|
| 104 |
+
# Close the DB connection -- unless we're still in a transaction. This
|
| 105 |
+
# is required as a workaround for an edge case in MySQL: if the same
|
| 106 |
+
# connection is used to create tables, load data, and query, the query
|
| 107 |
+
# can return incorrect results. See Django #7572, MySQL #37735.
|
| 108 |
+
if transaction.get_autocommit(self.using):
|
| 109 |
+
connections[self.using].close()
|
| 110 |
+
|
| 111 |
+
@cached_property
|
| 112 |
+
def compression_formats(self):
|
| 113 |
+
"""A dict mapping format names to (open function, mode arg) tuples."""
|
| 114 |
+
# Forcing binary mode may be revisited after dropping Python 2 support
|
| 115 |
+
# (see #22399).
|
| 116 |
+
compression_formats = {
|
| 117 |
+
None: (open, "rb"),
|
| 118 |
+
"gz": (gzip.GzipFile, "rb"),
|
| 119 |
+
"zip": (SingleZipReader, "r"),
|
| 120 |
+
"stdin": (lambda *args: sys.stdin, None),
|
| 121 |
+
}
|
| 122 |
+
if has_bz2:
|
| 123 |
+
compression_formats["bz2"] = (bz2.BZ2File, "r")
|
| 124 |
+
if has_lzma:
|
| 125 |
+
compression_formats["lzma"] = (lzma.LZMAFile, "r")
|
| 126 |
+
compression_formats["xz"] = (lzma.LZMAFile, "r")
|
| 127 |
+
return compression_formats
|
| 128 |
+
|
| 129 |
+
def reset_sequences(self, connection, models):
|
| 130 |
+
"""Reset database sequences for the given connection and models."""
|
| 131 |
+
sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
|
| 132 |
+
if sequence_sql:
|
| 133 |
+
if self.verbosity >= 2:
|
| 134 |
+
self.stdout.write("Resetting sequences")
|
| 135 |
+
with connection.cursor() as cursor:
|
| 136 |
+
for line in sequence_sql:
|
| 137 |
+
cursor.execute(line)
|
| 138 |
+
|
| 139 |
+
    def loaddata(self, fixture_labels):
        """
        Load every fixture named in *fixture_labels* into ``self.using``.

        Maintains installation counters on ``self`` and, after loading,
        re-checks constraints (disabled during the load) and resets
        database sequences when any object was installed.
        """
        connection = connections[self.using]

        # Keep a count of the installed objects and fixtures
        self.fixture_count = 0
        self.loaded_object_count = 0
        self.fixture_object_count = 0
        self.models = set()

        self.serialization_formats = serializers.get_public_serializer_formats()

        # Django's test suite repeatedly tries to load initial_data fixtures
        # from apps that don't have any fixtures. Because disabling constraint
        # checks can be expensive on some database (especially MSSQL), bail
        # out early if no fixtures are found.
        for fixture_label in fixture_labels:
            if self.find_fixtures(fixture_label):
                break
        else:
            return

        self.objs_with_deferred_fields = []
        with connection.constraint_checks_disabled():
            for fixture_label in fixture_labels:
                self.load_label(fixture_label)
            # Objects with forward references can only be completed once all
            # fixtures have been read; finish them now.
            for obj in self.objs_with_deferred_fields:
                obj.save_deferred_fields(using=self.using)

        # Since we disabled constraint checks, we must manually check for
        # any invalid keys that might have been added
        table_names = [model._meta.db_table for model in self.models]
        try:
            connection.check_constraints(table_names=table_names)
        except Exception as e:
            # Prefix the original message so the user knows the context.
            e.args = ("Problem installing fixtures: %s" % e,)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if self.loaded_object_count > 0:
            self.reset_sequences(connection, self.models)

        if self.verbosity >= 1:
            if self.fixture_object_count == self.loaded_object_count:
                self.stdout.write(
                    "Installed %d object(s) from %d fixture(s)"
                    % (self.loaded_object_count, self.fixture_count)
                )
            else:
                # Some objects were skipped (excluded apps/models or router).
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)"
                    % (
                        self.loaded_object_count,
                        self.fixture_object_count,
                        self.fixture_count,
                    )
                )
|
| 196 |
+
|
| 197 |
+
    def save_obj(self, obj):
        """Save an object if permitted. Return True when it was saved."""
        # Honor --exclude for both whole apps and individual models.
        if (
            obj.object._meta.app_config in self.excluded_apps
            or type(obj.object) in self.excluded_models
        ):
            return False
        saved = False
        # Respect the database router: only save models allowed to migrate
        # to the target database.
        if router.allow_migrate_model(self.using, obj.object.__class__):
            saved = True
            self.models.add(obj.object.__class__)
            try:
                obj.save(using=self.using)
            # psycopg raises ValueError if data contains NUL chars.
            except (DatabaseError, IntegrityError, ValueError) as e:
                # Rewrap with object identity so the failing row is obvious.
                e.args = (
                    "Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s"
                    % {
                        "object_label": obj.object._meta.label,
                        "pk": obj.object.pk,
                        "error_msg": e,
                    },
                )
                raise
        if obj.deferred_fields:
            # Forward references are resolved after all fixtures are loaded
            # (see loaddata()).
            self.objs_with_deferred_fields.append(obj)
        return saved
|
| 224 |
+
|
| 225 |
+
    def load_label(self, fixture_label):
        """Load fixtures files for a given label."""
        show_progress = self.verbosity >= 3
        for fixture_file, fixture_dir, fixture_name in self.find_fixtures(
            fixture_label
        ):
            # Re-parse the found file name: the label alone may omit the
            # serialization/compression extensions.
            _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
            open_method, mode = self.compression_formats[cmp_fmt]
            fixture = open_method(fixture_file, mode)
            self.fixture_count += 1
            objects_in_fixture = 0
            loaded_objects_in_fixture = 0
            if self.verbosity >= 2:
                self.stdout.write(
                    "Installing %s fixture '%s' from %s."
                    % (ser_fmt, fixture_name, humanize(fixture_dir))
                )
            try:
                objects = serializers.deserialize(
                    ser_fmt,
                    fixture,
                    using=self.using,
                    ignorenonexistent=self.ignore,
                    handle_forward_references=True,
                )

                for obj in objects:
                    objects_in_fixture += 1
                    if self.save_obj(obj):
                        loaded_objects_in_fixture += 1
                        if show_progress:
                            # '\r' keeps the progress line in place.
                            self.stdout.write(
                                "\rProcessed %i object(s)." % loaded_objects_in_fixture,
                                ending="",
                            )
            except Exception as e:
                # CommandErrors already carry a user-facing message; don't
                # rewrap them.
                if not isinstance(e, CommandError):
                    e.args = (
                        "Problem installing fixture '%s': %s" % (fixture_file, e),
                    )
                raise
            finally:
                fixture.close()

            if objects_in_fixture and show_progress:
                self.stdout.write()  # Add a newline after progress indicator.
            self.loaded_object_count += loaded_objects_in_fixture
            self.fixture_object_count += objects_in_fixture
            # Warn if the fixture we loaded contains 0 objects.
            if objects_in_fixture == 0:
                warnings.warn(
                    "No fixture data found for '%s'. (File format may be "
                    "invalid.)" % fixture_name,
                    RuntimeWarning,
                )
|
| 280 |
+
|
| 281 |
+
def get_fixture_name_and_dirs(self, fixture_name):
|
| 282 |
+
dirname, basename = os.path.split(fixture_name)
|
| 283 |
+
if os.path.isabs(fixture_name):
|
| 284 |
+
fixture_dirs = [dirname]
|
| 285 |
+
else:
|
| 286 |
+
fixture_dirs = self.fixture_dirs
|
| 287 |
+
if os.path.sep in os.path.normpath(fixture_name):
|
| 288 |
+
fixture_dirs = [os.path.join(dir_, dirname) for dir_ in fixture_dirs]
|
| 289 |
+
return basename, fixture_dirs
|
| 290 |
+
|
| 291 |
+
def get_targets(self, fixture_name, ser_fmt, cmp_fmt):
|
| 292 |
+
databases = [self.using, None]
|
| 293 |
+
cmp_fmts = self.compression_formats if cmp_fmt is None else [cmp_fmt]
|
| 294 |
+
ser_fmts = self.serialization_formats if ser_fmt is None else [ser_fmt]
|
| 295 |
+
return {
|
| 296 |
+
"%s.%s"
|
| 297 |
+
% (
|
| 298 |
+
fixture_name,
|
| 299 |
+
".".join([ext for ext in combo if ext]),
|
| 300 |
+
)
|
| 301 |
+
for combo in product(databases, ser_fmts, cmp_fmts)
|
| 302 |
+
}
|
| 303 |
+
|
| 304 |
+
def find_fixture_files_in_dir(self, fixture_dir, fixture_name, targets):
|
| 305 |
+
fixture_files_in_dir = []
|
| 306 |
+
path = os.path.join(fixture_dir, fixture_name)
|
| 307 |
+
for candidate in glob.iglob(glob.escape(path) + "*"):
|
| 308 |
+
if os.path.basename(candidate) in targets:
|
| 309 |
+
# Save the fixture_dir and fixture_name for future error
|
| 310 |
+
# messages.
|
| 311 |
+
fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
|
| 312 |
+
return fixture_files_in_dir
|
| 313 |
+
|
| 314 |
+
    # NOTE(review): functools.cache on a method keys on ``self`` and keeps
    # the Command instance alive for the cache's lifetime (ruff B019);
    # acceptable here because management commands are short-lived.
    @functools.cache
    def find_fixtures(self, fixture_label):
        """Find fixture files for a given label."""
        if fixture_label == READ_STDIN:
            # Sentinel triple: stdin has no directory or real file name.
            return [(READ_STDIN, None, READ_STDIN)]

        fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
        if self.verbosity >= 2:
            self.stdout.write("Loading '%s' fixtures..." % fixture_name)

        fixture_name, fixture_dirs = self.get_fixture_name_and_dirs(fixture_name)
        targets = self.get_targets(fixture_name, ser_fmt, cmp_fmt)
        fixture_files = []
        for fixture_dir in fixture_dirs:
            if self.verbosity >= 2:
                self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
            fixture_files_in_dir = self.find_fixture_files_in_dir(
                fixture_dir,
                fixture_name,
                targets,
            )
            if self.verbosity >= 2 and not fixture_files_in_dir:
                self.stdout.write(
                    "No fixture '%s' in %s." % (fixture_name, humanize(fixture_dir))
                )

            # Check kept for backwards-compatibility; it isn't clear why
            # duplicates are only allowed in different directories.
            if len(fixture_files_in_dir) > 1:
                raise CommandError(
                    "Multiple fixtures named '%s' in %s. Aborting."
                    % (fixture_name, humanize(fixture_dir))
                )
            fixture_files.extend(fixture_files_in_dir)

        if not fixture_files:
            raise CommandError("No fixture named '%s' found." % fixture_name)

        return fixture_files
|
| 353 |
+
|
| 354 |
+
    @cached_property
    def fixture_dirs(self):
        """
        Return a list of fixture directories.

        The list contains the 'fixtures' subdirectory of each installed
        application, if it exists, the directories in FIXTURE_DIRS, and the
        current directory.
        """
        dirs = []
        fixture_dirs = settings.FIXTURE_DIRS
        if len(fixture_dirs) != len(set(fixture_dirs)):
            raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
        for app_config in apps.get_app_configs():
            app_label = app_config.label
            app_dir = os.path.join(app_config.path, "fixtures")
            # Compare as strings so Path instances in FIXTURE_DIRS are
            # caught as well.
            if app_dir in [str(d) for d in fixture_dirs]:
                raise ImproperlyConfigured(
                    "'%s' is a default fixture directory for the '%s' app "
                    "and cannot be listed in settings.FIXTURE_DIRS."
                    % (app_dir, app_label)
                )

            # With --app, only that application's fixtures directory counts.
            if self.app_label and app_label != self.app_label:
                continue
            if os.path.isdir(app_dir):
                dirs.append(app_dir)
        dirs.extend(fixture_dirs)
        dirs.append("")  # The current directory is searched last.
        return [os.path.realpath(d) for d in dirs]
|
| 384 |
+
|
| 385 |
+
    def parse_name(self, fixture_name):
        """
        Split fixture name in name, serialization format, compression format.
        """
        if fixture_name == READ_STDIN:
            # Reading from stdin gives no file extension to sniff, so the
            # serialization format must be supplied explicitly.
            if not self.format:
                raise CommandError(
                    "--format must be specified when reading from stdin."
                )
            return READ_STDIN, self.format, "stdin"

        # At most two extensions matter: '<name>.<ser_fmt>.<cmp_fmt>'.
        parts = fixture_name.rsplit(".", 2)

        # The compression extension, when present, is always the last part.
        if len(parts) > 1 and parts[-1] in self.compression_formats:
            cmp_fmt = parts[-1]
            parts = parts[:-1]
        else:
            cmp_fmt = None

        if len(parts) > 1:
            if parts[-1] in self.serialization_formats:
                ser_fmt = parts[-1]
                parts = parts[:-1]
            else:
                raise CommandError(
                    "Problem installing fixture '%s': %s is not a known "
                    "serialization format." % (".".join(parts[:-1]), parts[-1])
                )
        else:
            ser_fmt = None

        # Whatever remains (possibly containing dots) is the fixture name.
        name = ".".join(parts)

        return name, ser_fmt, cmp_fmt
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
class SingleZipReader(zipfile.ZipFile):
    """A ZipFile that must contain exactly one member, readable via read()."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # A fixture archive with several members would be ambiguous.
        if len(self.namelist()) != 1:
            raise ValueError("Zip-compressed fixtures must contain one file.")

    def read(self):
        # Delegate to ZipFile.read() for the single archive member.
        return super().read(self.namelist()[0])
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def humanize(dirname):
    """Return *dirname* quoted, or a placeholder when it is empty."""
    if dirname:
        return "'%s'" % dirname
    return "absolute path"
|
testbed/django__django/django/core/management/commands/makemessages.py
ADDED
|
@@ -0,0 +1,783 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import glob
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
import sys
|
| 5 |
+
from functools import total_ordering
|
| 6 |
+
from itertools import dropwhile
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
import django
|
| 10 |
+
from django.conf import settings
|
| 11 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 12 |
+
from django.core.files.temp import NamedTemporaryFile
|
| 13 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 14 |
+
from django.core.management.utils import (
|
| 15 |
+
find_command,
|
| 16 |
+
handle_extensions,
|
| 17 |
+
is_ignored_path,
|
| 18 |
+
popen_wrapper,
|
| 19 |
+
)
|
| 20 |
+
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
|
| 21 |
+
from django.utils.functional import cached_property
|
| 22 |
+
from django.utils.jslex import prepare_js_for_gettext
|
| 23 |
+
from django.utils.regex_helper import _lazy_re_compile
|
| 24 |
+
from django.utils.text import get_text_list
|
| 25 |
+
from django.utils.translation import templatize
|
| 26 |
+
|
| 27 |
+
# Matches the quoted 'Plural-Forms: ...' header entry of a PO/POT file so it
# can be located and copied between catalogs.
plural_forms_re = _lazy_re_compile(
    r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL
)
# Exit status returned by the GNU gettext utilities on success.
STATUS_OK = 0
# Sentinel for a translatable file discovered outside any locale directory.
NO_LOCALE_DIR = object()
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def check_programs(*programs):
    """Raise CommandError for the first gettext program not on PATH."""
    for program in programs:
        if find_command(program) is not None:
            continue
        raise CommandError(
            "Can't find %s. Make sure you have GNU gettext tools 0.15 or "
            "newer installed." % program
        )
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def is_valid_locale(locale):
    """
    Return a truthy value for locale codes shaped like 'xx' or 'xx_Y...'
    (lowercase language, optional underscore plus uppercase-initial region).
    """
    # Single pattern equivalent to matching '^[a-z]+$' or '^[a-z]+_[A-Z].*$'.
    return re.match(r"^[a-z]+(_[A-Z].*)?$", locale)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@total_ordering
class TranslatableFile:
    """A source file that may contain strings marked for translation."""

    def __init__(self, dirpath, file_name, locale_dir):
        self.file = file_name
        self.dirpath = dirpath
        self.locale_dir = locale_dir

    @property
    def path(self):
        """Full path of the file (directory joined with file name)."""
        return os.path.join(self.dirpath, self.file)

    # Equality and ordering are by path; @total_ordering fills in the rest.
    def __eq__(self, other):
        return self.path == other.path

    def __lt__(self, other):
        return self.path < other.path

    def __repr__(self):
        return "<{}: {}>".format(
            type(self).__name__,
            os.sep.join((self.dirpath, self.file)),
        )
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class BuildFile:
    """
    Represent the state of a translatable file during the build process.
    """

    def __init__(self, command, domain, translatable):
        self.command = command
        self.domain = domain
        self.translatable = translatable

    @cached_property
    def is_templatized(self):
        # Whether the file needs a preprocessed copy before xgettext runs.
        if self.domain == "djangojs":
            # Older gettext can't parse JS directly; templatize to C syntax.
            return self.command.gettext_version < (0, 18, 3)
        elif self.domain == "django":
            # Only template files (non-.py) are templatized for 'django'.
            file_ext = os.path.splitext(self.translatable.file)[1]
            return file_ext != ".py"
        return False

    @cached_property
    def path(self):
        # Path of the original translatable file.
        return self.translatable.path

    @cached_property
    def work_path(self):
        """
        Path to a file which is being fed into GNU gettext pipeline. This may
        be either a translatable or its preprocessed version.
        """
        if not self.is_templatized:
            return self.path
        # Preprocessed copies get an extension xgettext understands.
        extension = {
            "djangojs": "c",
            "django": "py",
        }.get(self.domain)
        filename = "%s.%s" % (self.translatable.file, extension)
        return os.path.join(self.translatable.dirpath, filename)

    def preprocess(self):
        """
        Preprocess (if necessary) a translatable file before passing it to
        xgettext GNU gettext utility.
        """
        if not self.is_templatized:
            return

        with open(self.path, encoding="utf-8") as fp:
            src_data = fp.read()

        if self.domain == "djangojs":
            content = prepare_js_for_gettext(src_data)
        elif self.domain == "django":
            # path[2:] strips the leading './' so origins stay relative.
            content = templatize(src_data, origin=self.path[2:])

        with open(self.work_path, "w", encoding="utf-8") as fp:
            fp.write(content)

    def postprocess_messages(self, msgs):
        """
        Postprocess messages generated by xgettext GNU gettext utility.

        Transform paths as if these messages were generated from original
        translatable files rather than from preprocessed versions.
        """
        if not self.is_templatized:
            return msgs

        # Remove '.py' suffix
        if os.name == "nt":
            # Preserve '.\' prefix on Windows to respect gettext behavior
            old_path = self.work_path
            new_path = self.path
        else:
            old_path = self.work_path[2:]
            new_path = self.path[2:]

        # Rewrite only '#: <path>' location comment lines.
        return re.sub(
            r"^(#: .*)(" + re.escape(old_path) + r")",
            lambda match: match[0].replace(old_path, new_path),
            msgs,
            flags=re.MULTILINE,
        )

    def cleanup(self):
        """
        Remove a preprocessed copy of a translatable file (if any).
        """
        if self.is_templatized:
            # This check is needed for the case of a symlinked file and its
            # source being processed inside a single group (locale dir);
            # removing either of those two removes both.
            if os.path.exists(self.work_path):
                os.unlink(self.work_path)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def normalize_eols(raw_contents):
    """
    Take a block of raw text that will be passed through str.splitlines() to
    get universal newlines treatment.

    Return the resulting block of text with normalized `\\n` EOL sequences
    ready to be written to disk using current platform's native EOLs.
    """
    lines = raw_contents.splitlines()
    text = "\n".join(lines)
    # Guarantee a trailing EOL when the final line carries content.
    if lines and lines[-1]:
        text += "\n"
    return text
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def write_pot_file(potfile, msgs):
    """
    Write the `potfile` with the `msgs` contents, making sure its format is
    valid.
    """
    pot_lines = msgs.splitlines()
    if os.path.exists(potfile):
        # Strip the header
        # (drop every non-empty line up to the first blank line, since the
        # existing file already has a header).
        lines = dropwhile(len, pot_lines)
    else:
        lines = []
        found, header_read = False, False
        for line in pot_lines:
            if not found and not header_read:
                # Replace xgettext's placeholder charset inside the header.
                if "charset=CHARSET" in line:
                    found = True
                    line = line.replace("charset=CHARSET", "charset=UTF-8")
            if not line and not found:
                # Blank line marks the end of the header block.
                header_read = True
            lines.append(line)
    msgs = "\n".join(lines)
    # Force newlines of POT files to '\n' to work around
    # https://savannah.gnu.org/bugs/index.php?52395
    with open(potfile, "a", encoding="utf-8", newline="\n") as fp:
        fp.write(msgs)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class Command(BaseCommand):
|
| 209 |
+
help = (
|
| 210 |
+
"Runs over the entire source tree of the current directory and pulls out all "
|
| 211 |
+
"strings marked for translation. It creates (or updates) a message file in the "
|
| 212 |
+
"conf/locale (in the django tree) or locale (for projects and applications) "
|
| 213 |
+
"directory.\n\nYou must run this command with one of either the --locale, "
|
| 214 |
+
"--exclude, or --all options."
|
| 215 |
+
)
|
| 216 |
+
|
| 217 |
+
translatable_file_class = TranslatableFile
|
| 218 |
+
build_file_class = BuildFile
|
| 219 |
+
|
| 220 |
+
requires_system_checks = []
|
| 221 |
+
|
| 222 |
+
msgmerge_options = ["-q", "--backup=none", "--previous", "--update"]
|
| 223 |
+
msguniq_options = ["--to-code=utf-8"]
|
| 224 |
+
msgattrib_options = ["--no-obsolete"]
|
| 225 |
+
xgettext_options = ["--from-code=UTF-8", "--add-comments=Translators"]
|
| 226 |
+
|
| 227 |
+
def add_arguments(self, parser):
|
| 228 |
+
parser.add_argument(
|
| 229 |
+
"--locale",
|
| 230 |
+
"-l",
|
| 231 |
+
default=[],
|
| 232 |
+
action="append",
|
| 233 |
+
help=(
|
| 234 |
+
"Creates or updates the message files for the given locale(s) (e.g. "
|
| 235 |
+
"pt_BR). Can be used multiple times."
|
| 236 |
+
),
|
| 237 |
+
)
|
| 238 |
+
parser.add_argument(
|
| 239 |
+
"--exclude",
|
| 240 |
+
"-x",
|
| 241 |
+
default=[],
|
| 242 |
+
action="append",
|
| 243 |
+
help="Locales to exclude. Default is none. Can be used multiple times.",
|
| 244 |
+
)
|
| 245 |
+
parser.add_argument(
|
| 246 |
+
"--domain",
|
| 247 |
+
"-d",
|
| 248 |
+
default="django",
|
| 249 |
+
help='The domain of the message files (default: "django").',
|
| 250 |
+
)
|
| 251 |
+
parser.add_argument(
|
| 252 |
+
"--all",
|
| 253 |
+
"-a",
|
| 254 |
+
action="store_true",
|
| 255 |
+
help="Updates the message files for all existing locales.",
|
| 256 |
+
)
|
| 257 |
+
parser.add_argument(
|
| 258 |
+
"--extension",
|
| 259 |
+
"-e",
|
| 260 |
+
dest="extensions",
|
| 261 |
+
action="append",
|
| 262 |
+
help='The file extension(s) to examine (default: "html,txt,py", or "js" '
|
| 263 |
+
'if the domain is "djangojs"). Separate multiple extensions with '
|
| 264 |
+
"commas, or use -e multiple times.",
|
| 265 |
+
)
|
| 266 |
+
parser.add_argument(
|
| 267 |
+
"--symlinks",
|
| 268 |
+
"-s",
|
| 269 |
+
action="store_true",
|
| 270 |
+
help="Follows symlinks to directories when examining source code "
|
| 271 |
+
"and templates for translation strings.",
|
| 272 |
+
)
|
| 273 |
+
parser.add_argument(
|
| 274 |
+
"--ignore",
|
| 275 |
+
"-i",
|
| 276 |
+
action="append",
|
| 277 |
+
dest="ignore_patterns",
|
| 278 |
+
default=[],
|
| 279 |
+
metavar="PATTERN",
|
| 280 |
+
help="Ignore files or directories matching this glob-style pattern. "
|
| 281 |
+
"Use multiple times to ignore more.",
|
| 282 |
+
)
|
| 283 |
+
parser.add_argument(
|
| 284 |
+
"--no-default-ignore",
|
| 285 |
+
action="store_false",
|
| 286 |
+
dest="use_default_ignore_patterns",
|
| 287 |
+
help=(
|
| 288 |
+
"Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and "
|
| 289 |
+
"'*.pyc'."
|
| 290 |
+
),
|
| 291 |
+
)
|
| 292 |
+
parser.add_argument(
|
| 293 |
+
"--no-wrap",
|
| 294 |
+
action="store_true",
|
| 295 |
+
help="Don't break long message lines into several lines.",
|
| 296 |
+
)
|
| 297 |
+
parser.add_argument(
|
| 298 |
+
"--no-location",
|
| 299 |
+
action="store_true",
|
| 300 |
+
help="Don't write '#: filename:line' lines.",
|
| 301 |
+
)
|
| 302 |
+
parser.add_argument(
|
| 303 |
+
"--add-location",
|
| 304 |
+
choices=("full", "file", "never"),
|
| 305 |
+
const="full",
|
| 306 |
+
nargs="?",
|
| 307 |
+
help=(
|
| 308 |
+
"Controls '#: filename:line' lines. If the option is 'full' "
|
| 309 |
+
"(the default if not given), the lines include both file name "
|
| 310 |
+
"and line number. If it's 'file', the line number is omitted. If "
|
| 311 |
+
"it's 'never', the lines are suppressed (same as --no-location). "
|
| 312 |
+
"--add-location requires gettext 0.19 or newer."
|
| 313 |
+
),
|
| 314 |
+
)
|
| 315 |
+
parser.add_argument(
|
| 316 |
+
"--no-obsolete",
|
| 317 |
+
action="store_true",
|
| 318 |
+
help="Remove obsolete message strings.",
|
| 319 |
+
)
|
| 320 |
+
parser.add_argument(
|
| 321 |
+
"--keep-pot",
|
| 322 |
+
action="store_true",
|
| 323 |
+
help="Keep .pot file after making messages. Useful when debugging.",
|
| 324 |
+
)
|
| 325 |
+
|
| 326 |
+
def handle(self, *args, **options):
    """
    Entry point for makemessages: read options, locate locale paths,
    build .pot catalogs and write/update a .po file for every selected
    locale.
    """
    locale = options["locale"]
    exclude = options["exclude"]
    self.domain = options["domain"]
    self.verbosity = options["verbosity"]
    process_all = options["all"]
    extensions = options["extensions"]
    self.symlinks = options["symlinks"]

    # Merge the user's ignore patterns with the defaults (unless
    # --no-default-ignore was given) and de-duplicate them.
    ignore_patterns = options["ignore_patterns"]
    if options["use_default_ignore_patterns"]:
        ignore_patterns += ["CVS", ".*", "*~", "*.pyc"]
    self.ignore_patterns = list(set(ignore_patterns))

    # Avoid messing with mutable class variables: copy each option list
    # before appending per-invocation flags.
    if options["no_wrap"]:
        self.msgmerge_options = self.msgmerge_options[:] + ["--no-wrap"]
        self.msguniq_options = self.msguniq_options[:] + ["--no-wrap"]
        self.msgattrib_options = self.msgattrib_options[:] + ["--no-wrap"]
        self.xgettext_options = self.xgettext_options[:] + ["--no-wrap"]
    if options["no_location"]:
        self.msgmerge_options = self.msgmerge_options[:] + ["--no-location"]
        self.msguniq_options = self.msguniq_options[:] + ["--no-location"]
        self.msgattrib_options = self.msgattrib_options[:] + ["--no-location"]
        self.xgettext_options = self.xgettext_options[:] + ["--no-location"]
    if options["add_location"]:
        # --add-location is only understood by gettext >= 0.19.
        if self.gettext_version < (0, 19):
            raise CommandError(
                "The --add-location option requires gettext 0.19 or later. "
                "You have %s." % ".".join(str(x) for x in self.gettext_version)
            )
        arg_add_location = "--add-location=%s" % options["add_location"]
        self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location]
        self.msguniq_options = self.msguniq_options[:] + [arg_add_location]
        self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location]
        self.xgettext_options = self.xgettext_options[:] + [arg_add_location]

    self.no_obsolete = options["no_obsolete"]
    self.keep_pot = options["keep_pot"]

    if self.domain not in ("django", "djangojs"):
        raise CommandError(
            "currently makemessages only supports domains "
            "'django' and 'djangojs'"
        )
    # Default file extensions depend on the domain being processed.
    if self.domain == "djangojs":
        exts = extensions or ["js"]
    else:
        exts = extensions or ["html", "txt", "py"]
    self.extensions = handle_extensions(exts)

    if (not locale and not exclude and not process_all) or self.domain is None:
        raise CommandError(
            "Type '%s help %s' for usage information."
            % (os.path.basename(sys.argv[0]), sys.argv[1])
        )

    if self.verbosity > 1:
        self.stdout.write(
            "examining files with the extensions: %s"
            % get_text_list(list(self.extensions), "and")
        )

    # Determine where locale files live. A conf/locale directory means we
    # are running inside the Django source tree itself.
    self.invoked_for_django = False
    self.locale_paths = []
    self.default_locale_path = None
    if os.path.isdir(os.path.join("conf", "locale")):
        self.locale_paths = [os.path.abspath(os.path.join("conf", "locale"))]
        self.default_locale_path = self.locale_paths[0]
        self.invoked_for_django = True
    else:
        if self.settings_available:
            self.locale_paths.extend(settings.LOCALE_PATHS)
        # Allow to run makemessages inside an app dir
        if os.path.isdir("locale"):
            self.locale_paths.append(os.path.abspath("locale"))
        if self.locale_paths:
            self.default_locale_path = self.locale_paths[0]
            os.makedirs(self.default_locale_path, exist_ok=True)

    # Build locale list from existing subdirectories of the default
    # locale path whose names look like language codes.
    looks_like_locale = re.compile(r"[a-z]{2}")
    locale_dirs = filter(
        os.path.isdir, glob.glob("%s/*" % self.default_locale_path)
    )
    all_locales = [
        lang_code
        for lang_code in map(os.path.basename, locale_dirs)
        if looks_like_locale.match(lang_code)
    ]

    # Account for excluded locales
    if process_all:
        locales = all_locales
    else:
        locales = locale or all_locales
        locales = set(locales).difference(exclude)

    # Fail fast when a required gettext program is missing.
    if locales:
        check_programs("msguniq", "msgmerge", "msgattrib")

    check_programs("xgettext")

    try:
        potfiles = self.build_potfiles()

        # Build po files for each selected locale
        for locale in locales:
            if not is_valid_locale(locale):
                # Try to guess what valid locale it could be
                # Valid examples are: en_GB, shi_Latn_MA and nl_NL-x-informal

                # Search for characters followed by a non character (i.e. separator)
                match = re.match(
                    r"^(?P<language>[a-zA-Z]+)"
                    r"(?P<separator>[^a-zA-Z])"
                    r"(?P<territory>.+)$",
                    locale,
                )
                if match:
                    locale_parts = match.groupdict()
                    language = locale_parts["language"].lower()
                    territory = (
                        locale_parts["territory"][:2].upper()
                        + locale_parts["territory"][2:]
                    )
                    proposed_locale = f"{language}_{territory}"
                else:
                    # It could be a language in uppercase
                    proposed_locale = locale.lower()

                # Recheck if the proposed locale is valid
                if is_valid_locale(proposed_locale):
                    self.stdout.write(
                        "invalid locale %s, did you mean %s?"
                        % (
                            locale,
                            proposed_locale,
                        ),
                    )
                else:
                    self.stdout.write("invalid locale %s" % locale)

                continue
            if self.verbosity > 0:
                self.stdout.write("processing locale %s" % locale)
            for potfile in potfiles:
                self.write_po_file(potfile, locale)
    finally:
        # Always clean up intermediate .pot files unless --keep-pot.
        if not self.keep_pot:
            self.remove_potfiles()
|
| 477 |
+
|
| 478 |
+
@cached_property
def gettext_version(self):
    """
    Return the installed xgettext version as a tuple of ints, e.g. (0, 19).

    Gettext tools will output system-encoded bytestrings instead of UTF-8,
    when looking up the version. It's especially a problem on Windows.
    """
    out, err, status = popen_wrapper(
        ["xgettext", "--version"],
        stdout_encoding=DEFAULT_LOCALE_ENCODING,
    )
    version_match = re.search(r"(\d+)\.(\d+)\.?(\d+)?", out)
    if version_match is None:
        raise CommandError("Unable to get gettext version. Is it installed?")
    return tuple(int(piece) for piece in version_match.groups() if piece is not None)
|
| 491 |
+
|
| 492 |
+
@cached_property
def settings_available(self):
    """
    Report whether Django settings are configured. When they are not,
    optionally warn on stderr (verbosity > 1) and return False.
    """
    try:
        settings.LOCALE_PATHS
    except ImproperlyConfigured:
        pass
    else:
        return True
    if self.verbosity > 1:
        self.stderr.write("Running without configured settings.")
    return False
|
| 501 |
+
|
| 502 |
+
def build_potfiles(self):
    """
    Build a .pot catalog per locale path, de-duplicate its messages with
    msguniq, normalize line endings, and return the list of .pot files
    that were actually produced.
    """
    translatable_files = self.find_files(".")
    self.remove_potfiles()
    self.process_files(translatable_files)
    produced = []
    for locale_path in self.locale_paths:
        pot_path = os.path.join(locale_path, "%s.pot" % self.domain)
        if not os.path.exists(pot_path):
            continue
        uniq_cmd = ["msguniq"] + self.msguniq_options + [pot_path]
        msgs, errors, status = popen_wrapper(uniq_cmd)
        if errors:
            if status != STATUS_OK:
                raise CommandError(
                    "errors happened while running msguniq\n%s" % errors
                )
            elif self.verbosity > 0:
                # Non-fatal output from msguniq is surfaced as a warning.
                self.stdout.write(errors)
        msgs = normalize_eols(msgs)
        with open(pot_path, "w", encoding="utf-8") as fp:
            fp.write(msgs)
        produced.append(pot_path)
    return produced
|
| 528 |
+
|
| 529 |
+
def remove_potfiles(self):
    """Delete any leftover <domain>.pot file from every locale path."""
    pot_name = "%s.pot" % self.domain
    for locale_path in self.locale_paths:
        candidate = os.path.join(locale_path, pot_name)
        if os.path.exists(candidate):
            os.unlink(candidate)
|
| 534 |
+
|
| 535 |
+
def find_files(self, root):
    """
    Get all files in the given root. Also check that there is a matching
    locale dir for each file.
    """
    all_files = []
    ignored_roots = []
    if self.settings_available:
        # MEDIA_ROOT/STATIC_ROOT trees never contain source translations.
        ignored_roots = [
            os.path.normpath(p)
            for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT)
            if p
        ]
    for dirpath, dirnames, filenames in os.walk(
        root, topdown=True, followlinks=self.symlinks
    ):
        # Iterate over a copy: dirnames is mutated in place so os.walk
        # (topdown) skips pruned directories.
        for dirname in dirnames[:]:
            if (
                is_ignored_path(
                    os.path.normpath(os.path.join(dirpath, dirname)),
                    self.ignore_patterns,
                )
                or os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots
            ):
                dirnames.remove(dirname)
                if self.verbosity > 1:
                    self.stdout.write("ignoring directory %s" % dirname)
            elif dirname == "locale":
                # A nested 'locale' dir becomes the preferred locale path
                # (inserted at index 0) and is not descended into.
                dirnames.remove(dirname)
                self.locale_paths.insert(
                    0, os.path.join(os.path.abspath(dirpath), dirname)
                )
        for filename in filenames:
            file_path = os.path.normpath(os.path.join(dirpath, filename))
            file_ext = os.path.splitext(filename)[1]
            if file_ext not in self.extensions or is_ignored_path(
                file_path, self.ignore_patterns
            ):
                if self.verbosity > 1:
                    self.stdout.write(
                        "ignoring file %s in %s" % (filename, dirpath)
                    )
            else:
                # Pick the first locale path whose parent contains dirpath;
                # fall back to the default path, then the NO_LOCALE_DIR
                # sentinel.
                locale_dir = None
                for path in self.locale_paths:
                    if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
                        locale_dir = path
                        break
                locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR
                all_files.append(
                    self.translatable_file_class(dirpath, filename, locale_dir)
                )
    return sorted(all_files)
|
| 588 |
+
|
| 589 |
+
def process_files(self, file_list):
    """
    Bucket translatable files by their locale directory, then run the
    .pot build process once per bucket.
    """
    by_locale_dir = {}
    for translatable in file_list:
        by_locale_dir.setdefault(translatable.locale_dir, []).append(translatable)
    for locale_dir, grouped in by_locale_dir.items():
        self.process_locale_dir(locale_dir, grouped)
|
| 600 |
+
|
| 601 |
+
def process_locale_dir(self, locale_dir, files):
    """
    Extract translatable literals from the specified files, creating or
    updating the POT file for a given locale directory.

    Use the xgettext GNU gettext utility.
    """
    build_files = []
    for translatable in files:
        if self.verbosity > 1:
            self.stdout.write(
                "processing file %s in %s"
                % (translatable.file, translatable.dirpath)
            )
        if self.domain not in ("djangojs", "django"):
            continue
        build_file = self.build_file_class(self, self.domain, translatable)
        try:
            build_file.preprocess()
        except UnicodeDecodeError as e:
            # A file with an undecodable encoding is skipped, not fatal.
            self.stdout.write(
                "UnicodeDecodeError: skipped file %s in %s (reason: %s)"
                % (
                    translatable.file,
                    translatable.dirpath,
                    e,
                )
            )
            continue
        except BaseException:
            # Cleanup before exit.
            for build_file in build_files:
                build_file.cleanup()
            raise
        build_files.append(build_file)

    if self.domain == "djangojs":
        # NOTE(review): uses the last build_file from the loop above;
        # presumably all files in a group share templatization — confirm.
        is_templatized = build_file.is_templatized
        args = [
            "xgettext",
            "-d",
            self.domain,
            "--language=%s" % ("C" if is_templatized else "JavaScript",),
            "--keyword=gettext_noop",
            "--keyword=gettext_lazy",
            "--keyword=ngettext_lazy:1,2",
            "--keyword=pgettext:1c,2",
            "--keyword=npgettext:1c,2,3",
            "--output=-",
        ]
    elif self.domain == "django":
        args = [
            "xgettext",
            "-d",
            self.domain,
            "--language=Python",
            "--keyword=gettext_noop",
            "--keyword=gettext_lazy",
            "--keyword=ngettext_lazy:1,2",
            "--keyword=pgettext:1c,2",
            "--keyword=npgettext:1c,2,3",
            "--keyword=pgettext_lazy:1c,2",
            "--keyword=npgettext_lazy:1c,2,3",
            "--output=-",
        ]
    else:
        return

    # Pass the (possibly long) file list to xgettext via --files-from to
    # avoid command-line length limits.
    input_files = [bf.work_path for bf in build_files]
    with NamedTemporaryFile(mode="w+") as input_files_list:
        input_files_list.write("\n".join(input_files))
        input_files_list.flush()
        args.extend(["--files-from", input_files_list.name])
        args.extend(self.xgettext_options)
        msgs, errors, status = popen_wrapper(args)

    if errors:
        if status != STATUS_OK:
            for build_file in build_files:
                build_file.cleanup()
            raise CommandError(
                "errors happened while running xgettext on %s\n%s"
                % ("\n".join(input_files), errors)
            )
        elif self.verbosity > 0:
            # Print warnings
            self.stdout.write(errors)

    if msgs:
        if locale_dir is NO_LOCALE_DIR:
            for build_file in build_files:
                build_file.cleanup()
            file_path = os.path.normpath(build_files[0].path)
            raise CommandError(
                "Unable to find a locale path to store translations for "
                "file %s. Make sure the 'locale' directory exists in an "
                "app or LOCALE_PATHS setting is set." % file_path
            )
        for build_file in build_files:
            msgs = build_file.postprocess_messages(msgs)
        potfile = os.path.join(locale_dir, "%s.pot" % self.domain)
        write_pot_file(potfile, msgs)

    for build_file in build_files:
        build_file.cleanup()
|
| 706 |
+
|
| 707 |
+
def write_po_file(self, potfile, locale):
    """
    Create or update the PO file for self.domain and `locale`.
    Use contents of the existing `potfile`.

    Use msgmerge and msgattrib GNU gettext utilities.
    """
    basedir = os.path.join(os.path.dirname(potfile), locale, "LC_MESSAGES")
    os.makedirs(basedir, exist_ok=True)
    pofile = os.path.join(basedir, "%s.po" % self.domain)

    if os.path.exists(pofile):
        # Existing catalog: merge the new template into it.
        args = ["msgmerge"] + self.msgmerge_options + [pofile, potfile]
        _, errors, status = popen_wrapper(args)
        if errors:
            if status != STATUS_OK:
                raise CommandError(
                    "errors happened while running msgmerge\n%s" % errors
                )
            elif self.verbosity > 0:
                self.stdout.write(errors)
        msgs = Path(pofile).read_text(encoding="utf-8")
    else:
        # New catalog: start from the template as-is.
        with open(potfile, encoding="utf-8") as fp:
            msgs = fp.read()
        if not self.invoked_for_django:
            msgs = self.copy_plural_forms(msgs, locale)
    msgs = normalize_eols(msgs)
    # Strip the msgcat marker comment that gettext leaves behind.
    msgs = msgs.replace(
        "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, ""
    )
    with open(pofile, "w", encoding="utf-8") as fp:
        fp.write(msgs)

    if self.no_obsolete:
        # --no-obsolete: rewrite the catalog in place without obsolete
        # entries.
        args = ["msgattrib"] + self.msgattrib_options + ["-o", pofile, pofile]
        msgs, errors, status = popen_wrapper(args)
        if errors:
            if status != STATUS_OK:
                raise CommandError(
                    "errors happened while running msgattrib\n%s" % errors
                )
            elif self.verbosity > 0:
                self.stdout.write(errors)
|
| 751 |
+
|
| 752 |
+
def copy_plural_forms(self, msgs, locale):
    """
    Copy plural forms header contents from a Django catalog of locale to
    the msgs string, inserting it at the right place. msgs should be the
    contents of a newly created .po file.
    """
    django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
    # For djangojs fall back to the 'django' catalog when no djangojs
    # catalog exists for this locale.
    if self.domain == "djangojs":
        domains = ("djangojs", "django")
    else:
        domains = ("django",)
    for domain in domains:
        django_po = os.path.join(
            django_dir, "conf", "locale", locale, "LC_MESSAGES", "%s.po" % domain
        )
        if os.path.exists(django_po):
            with open(django_po, encoding="utf-8") as fp:
                m = plural_forms_re.search(fp.read())
            if m:
                plural_form_line = m["value"]
                if self.verbosity > 1:
                    self.stdout.write("copying plural forms: %s" % plural_form_line)
                # Replace the first blank or Plural-Forms line with the
                # copied header; all later lines pass through unchanged.
                lines = []
                found = False
                for line in msgs.splitlines():
                    if not found and (not line or plural_forms_re.search(line)):
                        line = plural_form_line
                        found = True
                    lines.append(line)
                msgs = "\n".join(lines)
                break
    return msgs
|
testbed/django__django/django/core/management/commands/makemigrations.py
ADDED
|
@@ -0,0 +1,513 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import warnings
|
| 4 |
+
from itertools import takewhile
|
| 5 |
+
|
| 6 |
+
from django.apps import apps
|
| 7 |
+
from django.conf import settings
|
| 8 |
+
from django.core.management.base import BaseCommand, CommandError, no_translations
|
| 9 |
+
from django.core.management.utils import run_formatters
|
| 10 |
+
from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router
|
| 11 |
+
from django.db.migrations import Migration
|
| 12 |
+
from django.db.migrations.autodetector import MigrationAutodetector
|
| 13 |
+
from django.db.migrations.loader import MigrationLoader
|
| 14 |
+
from django.db.migrations.migration import SwappableTuple
|
| 15 |
+
from django.db.migrations.optimizer import MigrationOptimizer
|
| 16 |
+
from django.db.migrations.questioner import (
|
| 17 |
+
InteractiveMigrationQuestioner,
|
| 18 |
+
MigrationQuestioner,
|
| 19 |
+
NonInteractiveMigrationQuestioner,
|
| 20 |
+
)
|
| 21 |
+
from django.db.migrations.state import ProjectState
|
| 22 |
+
from django.db.migrations.utils import get_migration_name_timestamp
|
| 23 |
+
from django.db.migrations.writer import MigrationWriter
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Command(BaseCommand):
|
| 27 |
+
help = "Creates new migration(s) for apps."
|
| 28 |
+
|
| 29 |
+
def add_arguments(self, parser):
    """Register every command-line option accepted by makemigrations."""
    # Each entry is (flag strings, keyword arguments), handed to
    # parser.add_argument() in the original declaration order.
    option_specs = [
        (
            ("args",),
            {
                "metavar": "app_label",
                "nargs": "*",
                "help": "Specify the app label(s) to create migrations for.",
            },
        ),
        (
            ("--dry-run",),
            {
                "action": "store_true",
                "help": (
                    "Just show what migrations would be made; don't actually "
                    "write them."
                ),
            },
        ),
        (
            ("--merge",),
            {"action": "store_true", "help": "Enable fixing of migration conflicts."},
        ),
        (
            ("--empty",),
            {"action": "store_true", "help": "Create an empty migration."},
        ),
        (
            ("--noinput", "--no-input"),
            {
                "action": "store_false",
                "dest": "interactive",
                "help": "Tells Django to NOT prompt the user for input of any kind.",
            },
        ),
        (
            ("-n", "--name"),
            {"help": "Use this name for migration file(s)."},
        ),
        (
            ("--no-header",),
            {
                "action": "store_false",
                "dest": "include_header",
                "help": "Do not add header comments to new migration file(s).",
            },
        ),
        (
            ("--check",),
            {
                "action": "store_true",
                "dest": "check_changes",
                "help": (
                    "Exit with a non-zero status if model changes are missing "
                    "migrations and don't actually write them."
                ),
            },
        ),
        (
            ("--scriptable",),
            {
                "action": "store_true",
                "dest": "scriptable",
                "help": (
                    "Divert log output and input prompts to stderr, writing only "
                    "paths of generated migration files to stdout."
                ),
            },
        ),
        (
            ("--update",),
            {
                "action": "store_true",
                "dest": "update",
                "help": (
                    "Merge model changes into the latest migration and optimize "
                    "the resulting operations."
                ),
            },
        ),
    ]
    for flags, params in option_specs:
        parser.add_argument(*flags, **params)
|
| 96 |
+
|
| 97 |
+
@property
def log_output(self):
    """Stream used for log messages: stderr under --scriptable, else stdout."""
    if self.scriptable:
        return self.stderr
    return self.stdout
|
| 100 |
+
|
| 101 |
+
def log(self, msg):
    """Write *msg* to the command's log stream (see log_output)."""
    stream = self.log_output
    stream.write(msg)
|
| 103 |
+
|
| 104 |
+
@no_translations
def handle(self, *app_labels, **options):
    """
    Entry point for makemigrations: validate options and app labels,
    check migration-history consistency, handle conflicts/merges, then
    detect model changes and write migration files.
    """
    self.written_files = []
    self.verbosity = options["verbosity"]
    self.interactive = options["interactive"]
    self.dry_run = options["dry_run"]
    self.merge = options["merge"]
    self.empty = options["empty"]
    self.migration_name = options["name"]
    if self.migration_name and not self.migration_name.isidentifier():
        raise CommandError("The migration name must be a valid Python identifier.")
    self.include_header = options["include_header"]
    check_changes = options["check_changes"]
    self.scriptable = options["scriptable"]
    self.update = options["update"]
    # If logs and prompts are diverted to stderr, remove the ERROR style.
    if self.scriptable:
        self.stderr.style_func = None

    # Make sure the app they asked for exists
    app_labels = set(app_labels)
    has_bad_labels = False
    for app_label in app_labels:
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            self.stderr.write(str(err))
            has_bad_labels = True
    if has_bad_labels:
        sys.exit(2)

    # Load the current graph state. Pass in None for the connection so
    # the loader doesn't try to resolve replaced migrations from DB.
    loader = MigrationLoader(None, ignore_no_migrations=True)

    # Raise an error if any migrations are applied before their dependencies.
    consistency_check_labels = {config.label for config in apps.get_app_configs()}
    # Non-default databases are only checked if database routers used.
    aliases_to_check = (
        connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS]
    )
    for alias in sorted(aliases_to_check):
        connection = connections[alias]
        if connection.settings_dict["ENGINE"] != "django.db.backends.dummy" and any(
            # At least one model must be migrated to the database.
            router.allow_migrate(
                connection.alias, app_label, model_name=model._meta.object_name
            )
            for app_label in consistency_check_labels
            for model in apps.get_app_config(app_label).get_models()
        ):
            try:
                loader.check_consistent_history(connection)
            except OperationalError as error:
                # An unreachable database only warns; makemigrations can
                # still run offline.
                warnings.warn(
                    "Got an error checking a consistent migration history "
                    "performed for database connection '%s': %s" % (alias, error),
                    RuntimeWarning,
                )
    # Before anything else, see if there's conflicting apps and drop out
    # hard if there are any and they don't want to merge
    conflicts = loader.detect_conflicts()

    # If app_labels is specified, filter out conflicting migrations for
    # unspecified apps.
    if app_labels:
        conflicts = {
            app_label: conflict
            for app_label, conflict in conflicts.items()
            if app_label in app_labels
        }

    if conflicts and not self.merge:
        name_str = "; ".join(
            "%s in %s" % (", ".join(names), app) for app, names in conflicts.items()
        )
        raise CommandError(
            "Conflicting migrations detected; multiple leaf nodes in the "
            "migration graph: (%s).\nTo fix them run "
            "'python manage.py makemigrations --merge'" % name_str
        )

    # If they want to merge and there's nothing to merge, then politely exit
    if self.merge and not conflicts:
        self.log("No conflicts detected to merge.")
        return

    # If they want to merge and there is something to merge, then
    # divert into the merge code
    if self.merge and conflicts:
        return self.handle_merge(loader, conflicts)

    # Pick the questioner implementation based on --noinput.
    if self.interactive:
        questioner = InteractiveMigrationQuestioner(
            specified_apps=app_labels,
            dry_run=self.dry_run,
            prompt_output=self.log_output,
        )
    else:
        questioner = NonInteractiveMigrationQuestioner(
            specified_apps=app_labels,
            dry_run=self.dry_run,
            verbosity=self.verbosity,
            log=self.log,
        )
    # Set up autodetector
    autodetector = MigrationAutodetector(
        loader.project_state(),
        ProjectState.from_apps(apps),
        questioner,
    )

    # If they want to make an empty migration, make one for each app
    if self.empty:
        if not app_labels:
            raise CommandError(
                "You must supply at least one app label when using --empty."
            )
        # Make a fake changes() result we can pass to arrange_for_graph
        changes = {app: [Migration("custom", app)] for app in app_labels}
        changes = autodetector.arrange_for_graph(
            changes=changes,
            graph=loader.graph,
            migration_name=self.migration_name,
        )
        self.write_migration_files(changes)
        return

    # Detect changes
    changes = autodetector.changes(
        graph=loader.graph,
        trim_to_apps=app_labels or None,
        convert_apps=app_labels or None,
        migration_name=self.migration_name,
    )

    if not changes:
        # No changes? Tell them.
        if self.verbosity >= 1:
            if app_labels:
                if len(app_labels) == 1:
                    self.log("No changes detected in app '%s'" % app_labels.pop())
                else:
                    self.log(
                        "No changes detected in apps '%s'"
                        % ("', '".join(app_labels))
                    )
            else:
                self.log("No changes detected")
    else:
        # --check exits non-zero without writing anything.
        if check_changes:
            sys.exit(1)
        if self.update:
            self.write_to_last_migration_files(changes)
        else:
            self.write_migration_files(changes)
|
| 260 |
+
|
| 261 |
+
def write_to_last_migration_files(self, changes):
|
| 262 |
+
loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
|
| 263 |
+
new_changes = {}
|
| 264 |
+
update_previous_migration_paths = {}
|
| 265 |
+
for app_label, app_migrations in changes.items():
|
| 266 |
+
# Find last migration.
|
| 267 |
+
leaf_migration_nodes = loader.graph.leaf_nodes(app=app_label)
|
| 268 |
+
if len(leaf_migration_nodes) == 0:
|
| 269 |
+
raise CommandError(
|
| 270 |
+
f"App {app_label} has no migration, cannot update last migration."
|
| 271 |
+
)
|
| 272 |
+
leaf_migration_node = leaf_migration_nodes[0]
|
| 273 |
+
# Multiple leaf nodes have already been checked earlier in command.
|
| 274 |
+
leaf_migration = loader.graph.nodes[leaf_migration_node]
|
| 275 |
+
# Updated migration cannot be a squash migration, a dependency of
|
| 276 |
+
# another migration, and cannot be already applied.
|
| 277 |
+
if leaf_migration.replaces:
|
| 278 |
+
raise CommandError(
|
| 279 |
+
f"Cannot update squash migration '{leaf_migration}'."
|
| 280 |
+
)
|
| 281 |
+
if leaf_migration_node in loader.applied_migrations:
|
| 282 |
+
raise CommandError(
|
| 283 |
+
f"Cannot update applied migration '{leaf_migration}'."
|
| 284 |
+
)
|
| 285 |
+
depending_migrations = [
|
| 286 |
+
migration
|
| 287 |
+
for migration in loader.disk_migrations.values()
|
| 288 |
+
if leaf_migration_node in migration.dependencies
|
| 289 |
+
]
|
| 290 |
+
if depending_migrations:
|
| 291 |
+
formatted_migrations = ", ".join(
|
| 292 |
+
[f"'{migration}'" for migration in depending_migrations]
|
| 293 |
+
)
|
| 294 |
+
raise CommandError(
|
| 295 |
+
f"Cannot update migration '{leaf_migration}' that migrations "
|
| 296 |
+
f"{formatted_migrations} depend on."
|
| 297 |
+
)
|
| 298 |
+
# Build new migration.
|
| 299 |
+
for migration in app_migrations:
|
| 300 |
+
leaf_migration.operations.extend(migration.operations)
|
| 301 |
+
|
| 302 |
+
for dependency in migration.dependencies:
|
| 303 |
+
if isinstance(dependency, SwappableTuple):
|
| 304 |
+
if settings.AUTH_USER_MODEL == dependency.setting:
|
| 305 |
+
leaf_migration.dependencies.append(
|
| 306 |
+
("__setting__", "AUTH_USER_MODEL")
|
| 307 |
+
)
|
| 308 |
+
else:
|
| 309 |
+
leaf_migration.dependencies.append(dependency)
|
| 310 |
+
elif dependency[0] != migration.app_label:
|
| 311 |
+
leaf_migration.dependencies.append(dependency)
|
| 312 |
+
# Optimize migration.
|
| 313 |
+
optimizer = MigrationOptimizer()
|
| 314 |
+
leaf_migration.operations = optimizer.optimize(
|
| 315 |
+
leaf_migration.operations, app_label
|
| 316 |
+
)
|
| 317 |
+
# Update name.
|
| 318 |
+
previous_migration_path = MigrationWriter(leaf_migration).path
|
| 319 |
+
name_fragment = self.migration_name or leaf_migration.suggest_name()
|
| 320 |
+
suggested_name = leaf_migration.name[:4] + f"_{name_fragment}"
|
| 321 |
+
if leaf_migration.name == suggested_name:
|
| 322 |
+
new_name = leaf_migration.name + "_updated"
|
| 323 |
+
else:
|
| 324 |
+
new_name = suggested_name
|
| 325 |
+
leaf_migration.name = new_name
|
| 326 |
+
# Register overridden migration.
|
| 327 |
+
new_changes[app_label] = [leaf_migration]
|
| 328 |
+
update_previous_migration_paths[app_label] = previous_migration_path
|
| 329 |
+
|
| 330 |
+
self.write_migration_files(new_changes, update_previous_migration_paths)
|
| 331 |
+
|
| 332 |
+
def write_migration_files(self, changes, update_previous_migration_paths=None):
|
| 333 |
+
"""
|
| 334 |
+
Take a changes dict and write them out as migration files.
|
| 335 |
+
"""
|
| 336 |
+
directory_created = {}
|
| 337 |
+
for app_label, app_migrations in changes.items():
|
| 338 |
+
if self.verbosity >= 1:
|
| 339 |
+
self.log(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label))
|
| 340 |
+
for migration in app_migrations:
|
| 341 |
+
# Describe the migration
|
| 342 |
+
writer = MigrationWriter(migration, self.include_header)
|
| 343 |
+
if self.verbosity >= 1:
|
| 344 |
+
# Display a relative path if it's below the current working
|
| 345 |
+
# directory, or an absolute path otherwise.
|
| 346 |
+
migration_string = self.get_relative_path(writer.path)
|
| 347 |
+
self.log(" %s\n" % self.style.MIGRATE_LABEL(migration_string))
|
| 348 |
+
for operation in migration.operations:
|
| 349 |
+
self.log(" - %s" % operation.describe())
|
| 350 |
+
if self.scriptable:
|
| 351 |
+
self.stdout.write(migration_string)
|
| 352 |
+
if not self.dry_run:
|
| 353 |
+
# Write the migrations file to the disk.
|
| 354 |
+
migrations_directory = os.path.dirname(writer.path)
|
| 355 |
+
if not directory_created.get(app_label):
|
| 356 |
+
os.makedirs(migrations_directory, exist_ok=True)
|
| 357 |
+
init_path = os.path.join(migrations_directory, "__init__.py")
|
| 358 |
+
if not os.path.isfile(init_path):
|
| 359 |
+
open(init_path, "w").close()
|
| 360 |
+
# We just do this once per app
|
| 361 |
+
directory_created[app_label] = True
|
| 362 |
+
migration_string = writer.as_string()
|
| 363 |
+
with open(writer.path, "w", encoding="utf-8") as fh:
|
| 364 |
+
fh.write(migration_string)
|
| 365 |
+
self.written_files.append(writer.path)
|
| 366 |
+
if update_previous_migration_paths:
|
| 367 |
+
prev_path = update_previous_migration_paths[app_label]
|
| 368 |
+
rel_prev_path = self.get_relative_path(prev_path)
|
| 369 |
+
if writer.needs_manual_porting:
|
| 370 |
+
migration_path = self.get_relative_path(writer.path)
|
| 371 |
+
self.log(
|
| 372 |
+
self.style.WARNING(
|
| 373 |
+
f"Updated migration {migration_path} requires "
|
| 374 |
+
f"manual porting.\n"
|
| 375 |
+
f"Previous migration {rel_prev_path} was kept and "
|
| 376 |
+
f"must be deleted after porting functions manually."
|
| 377 |
+
)
|
| 378 |
+
)
|
| 379 |
+
else:
|
| 380 |
+
os.remove(prev_path)
|
| 381 |
+
self.log(f"Deleted {rel_prev_path}")
|
| 382 |
+
elif self.verbosity == 3:
|
| 383 |
+
# Alternatively, makemigrations --dry-run --verbosity 3
|
| 384 |
+
# will log the migrations rather than saving the file to
|
| 385 |
+
# the disk.
|
| 386 |
+
self.log(
|
| 387 |
+
self.style.MIGRATE_HEADING(
|
| 388 |
+
"Full migrations file '%s':" % writer.filename
|
| 389 |
+
)
|
| 390 |
+
)
|
| 391 |
+
self.log(writer.as_string())
|
| 392 |
+
run_formatters(self.written_files)
|
| 393 |
+
|
| 394 |
+
@staticmethod
|
| 395 |
+
def get_relative_path(path):
|
| 396 |
+
try:
|
| 397 |
+
migration_string = os.path.relpath(path)
|
| 398 |
+
except ValueError:
|
| 399 |
+
migration_string = path
|
| 400 |
+
if migration_string.startswith(".."):
|
| 401 |
+
migration_string = path
|
| 402 |
+
return migration_string
|
| 403 |
+
|
| 404 |
+
def handle_merge(self, loader, conflicts):
|
| 405 |
+
"""
|
| 406 |
+
Handles merging together conflicted migrations interactively,
|
| 407 |
+
if it's safe; otherwise, advises on how to fix it.
|
| 408 |
+
"""
|
| 409 |
+
if self.interactive:
|
| 410 |
+
questioner = InteractiveMigrationQuestioner(prompt_output=self.log_output)
|
| 411 |
+
else:
|
| 412 |
+
questioner = MigrationQuestioner(defaults={"ask_merge": True})
|
| 413 |
+
|
| 414 |
+
for app_label, migration_names in conflicts.items():
|
| 415 |
+
# Grab out the migrations in question, and work out their
|
| 416 |
+
# common ancestor.
|
| 417 |
+
merge_migrations = []
|
| 418 |
+
for migration_name in migration_names:
|
| 419 |
+
migration = loader.get_migration(app_label, migration_name)
|
| 420 |
+
migration.ancestry = [
|
| 421 |
+
mig
|
| 422 |
+
for mig in loader.graph.forwards_plan((app_label, migration_name))
|
| 423 |
+
if mig[0] == migration.app_label
|
| 424 |
+
]
|
| 425 |
+
merge_migrations.append(migration)
|
| 426 |
+
|
| 427 |
+
def all_items_equal(seq):
|
| 428 |
+
return all(item == seq[0] for item in seq[1:])
|
| 429 |
+
|
| 430 |
+
merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations))
|
| 431 |
+
common_ancestor_count = sum(
|
| 432 |
+
1
|
| 433 |
+
for common_ancestor_generation in takewhile(
|
| 434 |
+
all_items_equal, merge_migrations_generations
|
| 435 |
+
)
|
| 436 |
+
)
|
| 437 |
+
if not common_ancestor_count:
|
| 438 |
+
raise ValueError(
|
| 439 |
+
"Could not find common ancestor of %s" % migration_names
|
| 440 |
+
)
|
| 441 |
+
# Now work out the operations along each divergent branch
|
| 442 |
+
for migration in merge_migrations:
|
| 443 |
+
migration.branch = migration.ancestry[common_ancestor_count:]
|
| 444 |
+
migrations_ops = (
|
| 445 |
+
loader.get_migration(node_app, node_name).operations
|
| 446 |
+
for node_app, node_name in migration.branch
|
| 447 |
+
)
|
| 448 |
+
migration.merged_operations = sum(migrations_ops, [])
|
| 449 |
+
# In future, this could use some of the Optimizer code
|
| 450 |
+
# (can_optimize_through) to automatically see if they're
|
| 451 |
+
# mergeable. For now, we always just prompt the user.
|
| 452 |
+
if self.verbosity > 0:
|
| 453 |
+
self.log(self.style.MIGRATE_HEADING("Merging %s" % app_label))
|
| 454 |
+
for migration in merge_migrations:
|
| 455 |
+
self.log(self.style.MIGRATE_LABEL(" Branch %s" % migration.name))
|
| 456 |
+
for operation in migration.merged_operations:
|
| 457 |
+
self.log(" - %s" % operation.describe())
|
| 458 |
+
if questioner.ask_merge(app_label):
|
| 459 |
+
# If they still want to merge it, then write out an empty
|
| 460 |
+
# file depending on the migrations needing merging.
|
| 461 |
+
numbers = [
|
| 462 |
+
MigrationAutodetector.parse_number(migration.name)
|
| 463 |
+
for migration in merge_migrations
|
| 464 |
+
]
|
| 465 |
+
try:
|
| 466 |
+
biggest_number = max(x for x in numbers if x is not None)
|
| 467 |
+
except ValueError:
|
| 468 |
+
biggest_number = 1
|
| 469 |
+
subclass = type(
|
| 470 |
+
"Migration",
|
| 471 |
+
(Migration,),
|
| 472 |
+
{
|
| 473 |
+
"dependencies": [
|
| 474 |
+
(app_label, migration.name)
|
| 475 |
+
for migration in merge_migrations
|
| 476 |
+
],
|
| 477 |
+
},
|
| 478 |
+
)
|
| 479 |
+
parts = ["%04i" % (biggest_number + 1)]
|
| 480 |
+
if self.migration_name:
|
| 481 |
+
parts.append(self.migration_name)
|
| 482 |
+
else:
|
| 483 |
+
parts.append("merge")
|
| 484 |
+
leaf_names = "_".join(
|
| 485 |
+
sorted(migration.name for migration in merge_migrations)
|
| 486 |
+
)
|
| 487 |
+
if len(leaf_names) > 47:
|
| 488 |
+
parts.append(get_migration_name_timestamp())
|
| 489 |
+
else:
|
| 490 |
+
parts.append(leaf_names)
|
| 491 |
+
migration_name = "_".join(parts)
|
| 492 |
+
new_migration = subclass(migration_name, app_label)
|
| 493 |
+
writer = MigrationWriter(new_migration, self.include_header)
|
| 494 |
+
|
| 495 |
+
if not self.dry_run:
|
| 496 |
+
# Write the merge migrations file to the disk
|
| 497 |
+
with open(writer.path, "w", encoding="utf-8") as fh:
|
| 498 |
+
fh.write(writer.as_string())
|
| 499 |
+
run_formatters([writer.path])
|
| 500 |
+
if self.verbosity > 0:
|
| 501 |
+
self.log("\nCreated new merge migration %s" % writer.path)
|
| 502 |
+
if self.scriptable:
|
| 503 |
+
self.stdout.write(writer.path)
|
| 504 |
+
elif self.verbosity == 3:
|
| 505 |
+
# Alternatively, makemigrations --merge --dry-run --verbosity 3
|
| 506 |
+
# will log the merge migrations rather than saving the file
|
| 507 |
+
# to the disk.
|
| 508 |
+
self.log(
|
| 509 |
+
self.style.MIGRATE_HEADING(
|
| 510 |
+
"Full merge migrations file '%s':" % writer.filename
|
| 511 |
+
)
|
| 512 |
+
)
|
| 513 |
+
self.log(writer.as_string())
|
testbed/django__django/django/core/management/commands/migrate.py
ADDED
|
@@ -0,0 +1,511 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import time
|
| 3 |
+
from importlib import import_module
|
| 4 |
+
|
| 5 |
+
from django.apps import apps
|
| 6 |
+
from django.core.management.base import BaseCommand, CommandError, no_translations
|
| 7 |
+
from django.core.management.sql import emit_post_migrate_signal, emit_pre_migrate_signal
|
| 8 |
+
from django.db import DEFAULT_DB_ALIAS, connections, router
|
| 9 |
+
from django.db.migrations.autodetector import MigrationAutodetector
|
| 10 |
+
from django.db.migrations.executor import MigrationExecutor
|
| 11 |
+
from django.db.migrations.loader import AmbiguityError
|
| 12 |
+
from django.db.migrations.state import ModelState, ProjectState
|
| 13 |
+
from django.utils.module_loading import module_has_submodule
|
| 14 |
+
from django.utils.text import Truncator
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class Command(BaseCommand):
|
| 18 |
+
help = (
|
| 19 |
+
"Updates database schema. Manages both apps with migrations and those without."
|
| 20 |
+
)
|
| 21 |
+
requires_system_checks = []
|
| 22 |
+
|
| 23 |
+
def add_arguments(self, parser):
|
| 24 |
+
parser.add_argument(
|
| 25 |
+
"--skip-checks",
|
| 26 |
+
action="store_true",
|
| 27 |
+
help="Skip system checks.",
|
| 28 |
+
)
|
| 29 |
+
parser.add_argument(
|
| 30 |
+
"app_label",
|
| 31 |
+
nargs="?",
|
| 32 |
+
help="App label of an application to synchronize the state.",
|
| 33 |
+
)
|
| 34 |
+
parser.add_argument(
|
| 35 |
+
"migration_name",
|
| 36 |
+
nargs="?",
|
| 37 |
+
help="Database state will be brought to the state after that "
|
| 38 |
+
'migration. Use the name "zero" to unapply all migrations.',
|
| 39 |
+
)
|
| 40 |
+
parser.add_argument(
|
| 41 |
+
"--noinput",
|
| 42 |
+
"--no-input",
|
| 43 |
+
action="store_false",
|
| 44 |
+
dest="interactive",
|
| 45 |
+
help="Tells Django to NOT prompt the user for input of any kind.",
|
| 46 |
+
)
|
| 47 |
+
parser.add_argument(
|
| 48 |
+
"--database",
|
| 49 |
+
default=DEFAULT_DB_ALIAS,
|
| 50 |
+
help=(
|
| 51 |
+
'Nominates a database to synchronize. Defaults to the "default" '
|
| 52 |
+
"database."
|
| 53 |
+
),
|
| 54 |
+
)
|
| 55 |
+
parser.add_argument(
|
| 56 |
+
"--fake",
|
| 57 |
+
action="store_true",
|
| 58 |
+
help="Mark migrations as run without actually running them.",
|
| 59 |
+
)
|
| 60 |
+
parser.add_argument(
|
| 61 |
+
"--fake-initial",
|
| 62 |
+
action="store_true",
|
| 63 |
+
help=(
|
| 64 |
+
"Detect if tables already exist and fake-apply initial migrations if "
|
| 65 |
+
"so. Make sure that the current database schema matches your initial "
|
| 66 |
+
"migration before using this flag. Django will only check for an "
|
| 67 |
+
"existing table name."
|
| 68 |
+
),
|
| 69 |
+
)
|
| 70 |
+
parser.add_argument(
|
| 71 |
+
"--plan",
|
| 72 |
+
action="store_true",
|
| 73 |
+
help="Shows a list of the migration actions that will be performed.",
|
| 74 |
+
)
|
| 75 |
+
parser.add_argument(
|
| 76 |
+
"--run-syncdb",
|
| 77 |
+
action="store_true",
|
| 78 |
+
help="Creates tables for apps without migrations.",
|
| 79 |
+
)
|
| 80 |
+
parser.add_argument(
|
| 81 |
+
"--check",
|
| 82 |
+
action="store_true",
|
| 83 |
+
dest="check_unapplied",
|
| 84 |
+
help=(
|
| 85 |
+
"Exits with a non-zero status if unapplied migrations exist and does "
|
| 86 |
+
"not actually apply migrations."
|
| 87 |
+
),
|
| 88 |
+
)
|
| 89 |
+
parser.add_argument(
|
| 90 |
+
"--prune",
|
| 91 |
+
action="store_true",
|
| 92 |
+
dest="prune",
|
| 93 |
+
help="Delete nonexistent migrations from the django_migrations table.",
|
| 94 |
+
)
|
| 95 |
+
|
| 96 |
+
@no_translations
|
| 97 |
+
def handle(self, *args, **options):
|
| 98 |
+
database = options["database"]
|
| 99 |
+
if not options["skip_checks"]:
|
| 100 |
+
self.check(databases=[database])
|
| 101 |
+
|
| 102 |
+
self.verbosity = options["verbosity"]
|
| 103 |
+
self.interactive = options["interactive"]
|
| 104 |
+
|
| 105 |
+
# Import the 'management' module within each installed app, to register
|
| 106 |
+
# dispatcher events.
|
| 107 |
+
for app_config in apps.get_app_configs():
|
| 108 |
+
if module_has_submodule(app_config.module, "management"):
|
| 109 |
+
import_module(".management", app_config.name)
|
| 110 |
+
|
| 111 |
+
# Get the database we're operating from
|
| 112 |
+
connection = connections[database]
|
| 113 |
+
|
| 114 |
+
# Hook for backends needing any database preparation
|
| 115 |
+
connection.prepare_database()
|
| 116 |
+
# Work out which apps have migrations and which do not
|
| 117 |
+
executor = MigrationExecutor(connection, self.migration_progress_callback)
|
| 118 |
+
|
| 119 |
+
# Raise an error if any migrations are applied before their dependencies.
|
| 120 |
+
executor.loader.check_consistent_history(connection)
|
| 121 |
+
|
| 122 |
+
# Before anything else, see if there's conflicting apps and drop out
|
| 123 |
+
# hard if there are any
|
| 124 |
+
conflicts = executor.loader.detect_conflicts()
|
| 125 |
+
if conflicts:
|
| 126 |
+
name_str = "; ".join(
|
| 127 |
+
"%s in %s" % (", ".join(names), app) for app, names in conflicts.items()
|
| 128 |
+
)
|
| 129 |
+
raise CommandError(
|
| 130 |
+
"Conflicting migrations detected; multiple leaf nodes in the "
|
| 131 |
+
"migration graph: (%s).\nTo fix them run "
|
| 132 |
+
"'python manage.py makemigrations --merge'" % name_str
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
# If they supplied command line arguments, work out what they mean.
|
| 136 |
+
run_syncdb = options["run_syncdb"]
|
| 137 |
+
target_app_labels_only = True
|
| 138 |
+
if options["app_label"]:
|
| 139 |
+
# Validate app_label.
|
| 140 |
+
app_label = options["app_label"]
|
| 141 |
+
try:
|
| 142 |
+
apps.get_app_config(app_label)
|
| 143 |
+
except LookupError as err:
|
| 144 |
+
raise CommandError(str(err))
|
| 145 |
+
if run_syncdb:
|
| 146 |
+
if app_label in executor.loader.migrated_apps:
|
| 147 |
+
raise CommandError(
|
| 148 |
+
"Can't use run_syncdb with app '%s' as it has migrations."
|
| 149 |
+
% app_label
|
| 150 |
+
)
|
| 151 |
+
elif app_label not in executor.loader.migrated_apps:
|
| 152 |
+
raise CommandError("App '%s' does not have migrations." % app_label)
|
| 153 |
+
|
| 154 |
+
if options["app_label"] and options["migration_name"]:
|
| 155 |
+
migration_name = options["migration_name"]
|
| 156 |
+
if migration_name == "zero":
|
| 157 |
+
targets = [(app_label, None)]
|
| 158 |
+
else:
|
| 159 |
+
try:
|
| 160 |
+
migration = executor.loader.get_migration_by_prefix(
|
| 161 |
+
app_label, migration_name
|
| 162 |
+
)
|
| 163 |
+
except AmbiguityError:
|
| 164 |
+
raise CommandError(
|
| 165 |
+
"More than one migration matches '%s' in app '%s'. "
|
| 166 |
+
"Please be more specific." % (migration_name, app_label)
|
| 167 |
+
)
|
| 168 |
+
except KeyError:
|
| 169 |
+
raise CommandError(
|
| 170 |
+
"Cannot find a migration matching '%s' from app '%s'."
|
| 171 |
+
% (migration_name, app_label)
|
| 172 |
+
)
|
| 173 |
+
target = (app_label, migration.name)
|
| 174 |
+
# Partially applied squashed migrations are not included in the
|
| 175 |
+
# graph, use the last replacement instead.
|
| 176 |
+
if (
|
| 177 |
+
target not in executor.loader.graph.nodes
|
| 178 |
+
and target in executor.loader.replacements
|
| 179 |
+
):
|
| 180 |
+
incomplete_migration = executor.loader.replacements[target]
|
| 181 |
+
target = incomplete_migration.replaces[-1]
|
| 182 |
+
targets = [target]
|
| 183 |
+
target_app_labels_only = False
|
| 184 |
+
elif options["app_label"]:
|
| 185 |
+
targets = [
|
| 186 |
+
key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label
|
| 187 |
+
]
|
| 188 |
+
else:
|
| 189 |
+
targets = executor.loader.graph.leaf_nodes()
|
| 190 |
+
|
| 191 |
+
if options["prune"]:
|
| 192 |
+
if not options["app_label"]:
|
| 193 |
+
raise CommandError(
|
| 194 |
+
"Migrations can be pruned only when an app is specified."
|
| 195 |
+
)
|
| 196 |
+
if self.verbosity > 0:
|
| 197 |
+
self.stdout.write("Pruning migrations:", self.style.MIGRATE_HEADING)
|
| 198 |
+
to_prune = set(executor.loader.applied_migrations) - set(
|
| 199 |
+
executor.loader.disk_migrations
|
| 200 |
+
)
|
| 201 |
+
squashed_migrations_with_deleted_replaced_migrations = [
|
| 202 |
+
migration_key
|
| 203 |
+
for migration_key, migration_obj in executor.loader.replacements.items()
|
| 204 |
+
if any(replaced in to_prune for replaced in migration_obj.replaces)
|
| 205 |
+
]
|
| 206 |
+
if squashed_migrations_with_deleted_replaced_migrations:
|
| 207 |
+
self.stdout.write(
|
| 208 |
+
self.style.NOTICE(
|
| 209 |
+
" Cannot use --prune because the following squashed "
|
| 210 |
+
"migrations have their 'replaces' attributes and may not "
|
| 211 |
+
"be recorded as applied:"
|
| 212 |
+
)
|
| 213 |
+
)
|
| 214 |
+
for migration in squashed_migrations_with_deleted_replaced_migrations:
|
| 215 |
+
app, name = migration
|
| 216 |
+
self.stdout.write(f" {app}.{name}")
|
| 217 |
+
self.stdout.write(
|
| 218 |
+
self.style.NOTICE(
|
| 219 |
+
" Re-run 'manage.py migrate' if they are not marked as "
|
| 220 |
+
"applied, and remove 'replaces' attributes in their "
|
| 221 |
+
"Migration classes."
|
| 222 |
+
)
|
| 223 |
+
)
|
| 224 |
+
else:
|
| 225 |
+
to_prune = sorted(
|
| 226 |
+
migration for migration in to_prune if migration[0] == app_label
|
| 227 |
+
)
|
| 228 |
+
if to_prune:
|
| 229 |
+
for migration in to_prune:
|
| 230 |
+
app, name = migration
|
| 231 |
+
if self.verbosity > 0:
|
| 232 |
+
self.stdout.write(
|
| 233 |
+
self.style.MIGRATE_LABEL(f" Pruning {app}.{name}"),
|
| 234 |
+
ending="",
|
| 235 |
+
)
|
| 236 |
+
executor.recorder.record_unapplied(app, name)
|
| 237 |
+
if self.verbosity > 0:
|
| 238 |
+
self.stdout.write(self.style.SUCCESS(" OK"))
|
| 239 |
+
elif self.verbosity > 0:
|
| 240 |
+
self.stdout.write(" No migrations to prune.")
|
| 241 |
+
|
| 242 |
+
plan = executor.migration_plan(targets)
|
| 243 |
+
|
| 244 |
+
if options["plan"]:
|
| 245 |
+
self.stdout.write("Planned operations:", self.style.MIGRATE_LABEL)
|
| 246 |
+
if not plan:
|
| 247 |
+
self.stdout.write(" No planned migration operations.")
|
| 248 |
+
else:
|
| 249 |
+
for migration, backwards in plan:
|
| 250 |
+
self.stdout.write(str(migration), self.style.MIGRATE_HEADING)
|
| 251 |
+
for operation in migration.operations:
|
| 252 |
+
message, is_error = self.describe_operation(
|
| 253 |
+
operation, backwards
|
| 254 |
+
)
|
| 255 |
+
style = self.style.WARNING if is_error else None
|
| 256 |
+
self.stdout.write(" " + message, style)
|
| 257 |
+
if options["check_unapplied"]:
|
| 258 |
+
sys.exit(1)
|
| 259 |
+
return
|
| 260 |
+
if options["check_unapplied"]:
|
| 261 |
+
if plan:
|
| 262 |
+
sys.exit(1)
|
| 263 |
+
return
|
| 264 |
+
if options["prune"]:
|
| 265 |
+
return
|
| 266 |
+
|
| 267 |
+
# At this point, ignore run_syncdb if there aren't any apps to sync.
|
| 268 |
+
run_syncdb = options["run_syncdb"] and executor.loader.unmigrated_apps
|
| 269 |
+
# Print some useful info
|
| 270 |
+
if self.verbosity >= 1:
|
| 271 |
+
self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
|
| 272 |
+
if run_syncdb:
|
| 273 |
+
if options["app_label"]:
|
| 274 |
+
self.stdout.write(
|
| 275 |
+
self.style.MIGRATE_LABEL(
|
| 276 |
+
" Synchronize unmigrated app: %s" % app_label
|
| 277 |
+
)
|
| 278 |
+
)
|
| 279 |
+
else:
|
| 280 |
+
self.stdout.write(
|
| 281 |
+
self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ")
|
| 282 |
+
+ (", ".join(sorted(executor.loader.unmigrated_apps)))
|
| 283 |
+
)
|
| 284 |
+
if target_app_labels_only:
|
| 285 |
+
self.stdout.write(
|
| 286 |
+
self.style.MIGRATE_LABEL(" Apply all migrations: ")
|
| 287 |
+
+ (", ".join(sorted({a for a, n in targets})) or "(none)")
|
| 288 |
+
)
|
| 289 |
+
else:
|
| 290 |
+
if targets[0][1] is None:
|
| 291 |
+
self.stdout.write(
|
| 292 |
+
self.style.MIGRATE_LABEL(" Unapply all migrations: ")
|
| 293 |
+
+ str(targets[0][0])
|
| 294 |
+
)
|
| 295 |
+
else:
|
| 296 |
+
self.stdout.write(
|
| 297 |
+
self.style.MIGRATE_LABEL(" Target specific migration: ")
|
| 298 |
+
+ "%s, from %s" % (targets[0][1], targets[0][0])
|
| 299 |
+
)
|
| 300 |
+
|
| 301 |
+
pre_migrate_state = executor._create_project_state(with_applied_migrations=True)
|
| 302 |
+
pre_migrate_apps = pre_migrate_state.apps
|
| 303 |
+
emit_pre_migrate_signal(
|
| 304 |
+
self.verbosity,
|
| 305 |
+
self.interactive,
|
| 306 |
+
connection.alias,
|
| 307 |
+
stdout=self.stdout,
|
| 308 |
+
apps=pre_migrate_apps,
|
| 309 |
+
plan=plan,
|
| 310 |
+
)
|
| 311 |
+
|
| 312 |
+
# Run the syncdb phase.
|
| 313 |
+
if run_syncdb:
|
| 314 |
+
if self.verbosity >= 1:
|
| 315 |
+
self.stdout.write(
|
| 316 |
+
self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")
|
| 317 |
+
)
|
| 318 |
+
if options["app_label"]:
|
| 319 |
+
self.sync_apps(connection, [app_label])
|
| 320 |
+
else:
|
| 321 |
+
self.sync_apps(connection, executor.loader.unmigrated_apps)
|
| 322 |
+
|
| 323 |
+
# Migrate!
|
| 324 |
+
if self.verbosity >= 1:
|
| 325 |
+
self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
|
| 326 |
+
if not plan:
|
| 327 |
+
if self.verbosity >= 1:
|
| 328 |
+
self.stdout.write(" No migrations to apply.")
|
| 329 |
+
# If there's changes that aren't in migrations yet, tell them
|
| 330 |
+
# how to fix it.
|
| 331 |
+
autodetector = MigrationAutodetector(
|
| 332 |
+
executor.loader.project_state(),
|
| 333 |
+
ProjectState.from_apps(apps),
|
| 334 |
+
)
|
| 335 |
+
changes = autodetector.changes(graph=executor.loader.graph)
|
| 336 |
+
if changes:
|
| 337 |
+
self.stdout.write(
|
| 338 |
+
self.style.NOTICE(
|
| 339 |
+
" Your models in app(s): %s have changes that are not "
|
| 340 |
+
"yet reflected in a migration, and so won't be "
|
| 341 |
+
"applied." % ", ".join(repr(app) for app in sorted(changes))
|
| 342 |
+
)
|
| 343 |
+
)
|
| 344 |
+
self.stdout.write(
|
| 345 |
+
self.style.NOTICE(
|
| 346 |
+
" Run 'manage.py makemigrations' to make new "
|
| 347 |
+
"migrations, and then re-run 'manage.py migrate' to "
|
| 348 |
+
"apply them."
|
| 349 |
+
)
|
| 350 |
+
)
|
| 351 |
+
fake = False
|
| 352 |
+
fake_initial = False
|
| 353 |
+
else:
|
| 354 |
+
fake = options["fake"]
|
| 355 |
+
fake_initial = options["fake_initial"]
|
| 356 |
+
post_migrate_state = executor.migrate(
|
| 357 |
+
targets,
|
| 358 |
+
plan=plan,
|
| 359 |
+
state=pre_migrate_state.clone(),
|
| 360 |
+
fake=fake,
|
| 361 |
+
fake_initial=fake_initial,
|
| 362 |
+
)
|
| 363 |
+
# post_migrate signals have access to all models. Ensure that all models
|
| 364 |
+
# are reloaded in case any are delayed.
|
| 365 |
+
post_migrate_state.clear_delayed_apps_cache()
|
| 366 |
+
post_migrate_apps = post_migrate_state.apps
|
| 367 |
+
|
| 368 |
+
# Re-render models of real apps to include relationships now that
|
| 369 |
+
# we've got a final state. This wouldn't be necessary if real apps
|
| 370 |
+
# models were rendered with relationships in the first place.
|
| 371 |
+
with post_migrate_apps.bulk_update():
|
| 372 |
+
model_keys = []
|
| 373 |
+
for model_state in post_migrate_apps.real_models:
|
| 374 |
+
model_key = model_state.app_label, model_state.name_lower
|
| 375 |
+
model_keys.append(model_key)
|
| 376 |
+
post_migrate_apps.unregister_model(*model_key)
|
| 377 |
+
post_migrate_apps.render_multiple(
|
| 378 |
+
[ModelState.from_model(apps.get_model(*model)) for model in model_keys]
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
# Send the post_migrate signal, so individual apps can do whatever they need
|
| 382 |
+
# to do at this point.
|
| 383 |
+
emit_post_migrate_signal(
|
| 384 |
+
self.verbosity,
|
| 385 |
+
self.interactive,
|
| 386 |
+
connection.alias,
|
| 387 |
+
stdout=self.stdout,
|
| 388 |
+
apps=post_migrate_apps,
|
| 389 |
+
plan=plan,
|
| 390 |
+
)
|
| 391 |
+
|
| 392 |
+
def migration_progress_callback(self, action, migration=None, fake=False):
|
| 393 |
+
if self.verbosity >= 1:
|
| 394 |
+
compute_time = self.verbosity > 1
|
| 395 |
+
if action == "apply_start":
|
| 396 |
+
if compute_time:
|
| 397 |
+
self.start = time.monotonic()
|
| 398 |
+
self.stdout.write(" Applying %s..." % migration, ending="")
|
| 399 |
+
self.stdout.flush()
|
| 400 |
+
elif action == "apply_success":
|
| 401 |
+
elapsed = (
|
| 402 |
+
" (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
|
| 403 |
+
)
|
| 404 |
+
if fake:
|
| 405 |
+
self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
|
| 406 |
+
else:
|
| 407 |
+
self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
|
| 408 |
+
elif action == "unapply_start":
|
| 409 |
+
if compute_time:
|
| 410 |
+
self.start = time.monotonic()
|
| 411 |
+
self.stdout.write(" Unapplying %s..." % migration, ending="")
|
| 412 |
+
self.stdout.flush()
|
| 413 |
+
elif action == "unapply_success":
|
| 414 |
+
elapsed = (
|
| 415 |
+
" (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
|
| 416 |
+
)
|
| 417 |
+
if fake:
|
| 418 |
+
self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
|
| 419 |
+
else:
|
| 420 |
+
self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
|
| 421 |
+
elif action == "render_start":
|
| 422 |
+
if compute_time:
|
| 423 |
+
self.start = time.monotonic()
|
| 424 |
+
self.stdout.write(" Rendering model states...", ending="")
|
| 425 |
+
self.stdout.flush()
|
| 426 |
+
elif action == "render_success":
|
| 427 |
+
elapsed = (
|
| 428 |
+
" (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
|
| 429 |
+
)
|
| 430 |
+
self.stdout.write(self.style.SUCCESS(" DONE" + elapsed))
|
| 431 |
+
|
| 432 |
+
def sync_apps(self, connection, app_labels):
    """Run the old syncdb-style operation on a list of app_labels."""
    # Snapshot the tables that already exist on this connection.
    with connection.cursor() as cursor:
        existing_tables = connection.introspection.table_names(cursor)

    converter = connection.introspection.identifier_converter

    def table_missing(model):
        # A model needs its table created only if neither its own table nor
        # (for auto-created models) its origin model's table exists yet.
        meta = model._meta
        if converter(meta.db_table) in existing_tables:
            return False
        if meta.auto_created and converter(meta.auto_created._meta.db_table) in (
            existing_tables
        ):
            return False
        return True

    # Build the manifest of apps and models that are to be synchronized.
    manifest = {}
    for app_config in apps.get_app_configs():
        if app_config.models_module is None or app_config.label not in app_labels:
            continue
        candidates = router.get_migratable_models(
            app_config, connection.alias, include_auto_created=False
        )
        manifest[app_config.label] = [m for m in candidates if table_missing(m)]

    # Create the tables for each model
    if self.verbosity >= 1:
        self.stdout.write("  Creating tables...")
    with connection.schema_editor() as editor:
        for app_name, model_list in manifest.items():
            for model in model_list:
                # Never install unmanaged models, etc.
                if not model._meta.can_migrate(connection):
                    continue
                if self.verbosity >= 3:
                    self.stdout.write(
                        "    Processing %s.%s model"
                        % (app_name, model._meta.object_name)
                    )
                if self.verbosity >= 1:
                    self.stdout.write("    Creating table %s" % model._meta.db_table)
                editor.create_model(model)

        # Deferred SQL is executed when exiting the editor's context.
        if self.verbosity >= 1:
            self.stdout.write("    Running deferred SQL...")
+
@staticmethod
def describe_operation(operation, backwards):
    """Return a string that describes a migration operation for --plan."""
    is_error = False
    prefix = "Undo " if backwards else ""
    if hasattr(operation, "code"):
        # Operation exposes code/reverse_code callables (RunPython-like):
        # describe it via the callable's docstring; None means no callable.
        func = operation.reverse_code if backwards else operation.code
        action = (func.__doc__ or "") if func else None
    elif hasattr(operation, "sql"):
        # Operation exposes raw SQL (RunSQL-like).
        action = operation.reverse_sql if backwards else operation.sql
    else:
        action = ""
    if action is None:
        # No reverse callable available: flag the backwards plan entry.
        if backwards:
            action = "IRREVERSIBLE"
            is_error = True
    else:
        action = str(action).replace("\n", "")
    if action:
        action = " -> " + action
    return prefix + operation.describe() + Truncator(action).chars(40), is_error
testbed/django__django/django/core/management/commands/optimizemigration.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import shutil
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
from django.apps import apps
|
| 5 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 6 |
+
from django.core.management.utils import run_formatters
|
| 7 |
+
from django.db import migrations
|
| 8 |
+
from django.db.migrations.exceptions import AmbiguityError
|
| 9 |
+
from django.db.migrations.loader import MigrationLoader
|
| 10 |
+
from django.db.migrations.optimizer import MigrationOptimizer
|
| 11 |
+
from django.db.migrations.writer import MigrationWriter
|
| 12 |
+
from django.utils.version import get_docs_version
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class Command(BaseCommand):
    help = "Optimizes the operations for the named migration."

    def add_arguments(self, parser):
        """Register app_label, migration_name, and the --check flag."""
        parser.add_argument(
            "app_label",
            help="App label of the application to optimize the migration for.",
        )
        parser.add_argument(
            "migration_name", help="Migration name to optimize the operations for."
        )
        parser.add_argument(
            "--check",
            action="store_true",
            help="Exit with a non-zero status if the migration can be optimized.",
        )

    def handle(self, *args, **options):
        """
        Optimize the operations of one migration and rewrite its file.

        Validates the app label, resolves the migration by (possibly
        abbreviated) name, runs its operations through MigrationOptimizer,
        and writes the optimized result back to disk. With --check, exits
        with status 1 instead of writing when an optimization is possible.
        """
        verbosity = options["verbosity"]
        app_label = options["app_label"]
        migration_name = options["migration_name"]
        check = options["check"]

        # Validate app_label.
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))

        # Load the current graph state.
        loader = MigrationLoader(None)
        if app_label not in loader.migrated_apps:
            raise CommandError(f"App '{app_label}' does not have migrations.")
        # Find a migration.
        try:
            migration = loader.get_migration_by_prefix(app_label, migration_name)
        except AmbiguityError:
            raise CommandError(
                f"More than one migration matches '{migration_name}' in app "
                f"'{app_label}'. Please be more specific."
            )
        except KeyError:
            raise CommandError(
                f"Cannot find a migration matching '{migration_name}' from app "
                f"'{app_label}'."
            )

        # Optimize the migration.
        optimizer = MigrationOptimizer()
        new_operations = optimizer.optimize(migration.operations, migration.app_label)
        if len(migration.operations) == len(new_operations):
            if verbosity > 0:
                self.stdout.write("No optimizations possible.")
            return
        else:
            if verbosity > 0:
                self.stdout.write(
                    "Optimizing from %d operations to %d operations."
                    % (len(migration.operations), len(new_operations))
                )
            if check:
                # --check only reports; bail out before touching any file.
                sys.exit(1)

        # Set the new migration optimizations.
        migration.operations = new_operations

        # Write out the optimized migration file.
        writer = MigrationWriter(migration)
        migration_file_string = writer.as_string()
        if writer.needs_manual_porting:
            if migration.replaces:
                raise CommandError(
                    "Migration will require manual porting but is already a squashed "
                    "migration.\nTransition to a normal migration first: "
                    "https://docs.djangoproject.com/en/%s/topics/migrations/"
                    "#squashing-migrations" % get_docs_version()
                )
            # Make a new migration with those operations.
            subclass = type(
                "Migration",
                (migrations.Migration,),
                {
                    "dependencies": migration.dependencies,
                    "operations": new_operations,
                    "replaces": [(migration.app_label, migration.name)],
                },
            )
            optimized_migration_name = "%s_optimized" % migration.name
            optimized_migration = subclass(optimized_migration_name, app_label)
            # Re-render against the replacement migration; writer.path now
            # points at the new "<name>_optimized" file, not the original.
            writer = MigrationWriter(optimized_migration)
            migration_file_string = writer.as_string()
            if verbosity > 0:
                self.stdout.write(
                    self.style.MIGRATE_HEADING("Manual porting required") + "\n"
                    "  Your migrations contained functions that must be manually "
                    "copied over,\n"
                    "  as we could not safely copy their implementation.\n"
                    "  See the comment at the top of the optimized migration for "
                    "details."
                )
                if shutil.which("black"):
                    self.stdout.write(
                        self.style.WARNING(
                            "Optimized migration couldn't be formatted using the "
                            '"black" command. You can call it manually.'
                        )
                    )
        with open(writer.path, "w", encoding="utf-8") as fh:
            fh.write(migration_file_string)
        run_formatters([writer.path])

        if verbosity > 0:
            self.stdout.write(
                self.style.MIGRATE_HEADING(f"Optimized migration {writer.path}")
            )
|
testbed/django__django/django/core/management/commands/runserver.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
|
| 8 |
+
from django.conf import settings
|
| 9 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 10 |
+
from django.core.servers.basehttp import WSGIServer, get_internal_wsgi_application, run
|
| 11 |
+
from django.utils import autoreload
|
| 12 |
+
from django.utils.regex_helper import _lazy_re_compile
|
| 13 |
+
|
| 14 |
+
naiveip_re = _lazy_re_compile(
|
| 15 |
+
r"""^(?:
|
| 16 |
+
(?P<addr>
|
| 17 |
+
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
|
| 18 |
+
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
|
| 19 |
+
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
|
| 20 |
+
):)?(?P<port>\d+)$""",
|
| 21 |
+
re.X,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class Command(BaseCommand):
    help = "Starts a lightweight web server for development."

    # Validation is called explicitly each time the server is reloaded.
    requires_system_checks = []
    stealth_options = ("shutdown_message",)
    suppressed_base_arguments = {"--verbosity", "--traceback"}

    # Defaults applied when addrport is omitted on the command line.
    default_addr = "127.0.0.1"
    default_addr_ipv6 = "::1"
    default_port = "8000"
    protocol = "http"
    server_cls = WSGIServer

    def add_arguments(self, parser):
        """Register addrport and the IPv6/threading/reloader/check toggles."""
        parser.add_argument(
            "addrport", nargs="?", help="Optional port number, or ipaddr:port"
        )
        parser.add_argument(
            "--ipv6",
            "-6",
            action="store_true",
            dest="use_ipv6",
            help="Tells Django to use an IPv6 address.",
        )
        parser.add_argument(
            "--nothreading",
            action="store_false",
            dest="use_threading",
            help="Tells Django to NOT use threading.",
        )
        parser.add_argument(
            "--noreload",
            action="store_false",
            dest="use_reloader",
            help="Tells Django to NOT use the auto-reloader.",
        )
        parser.add_argument(
            "--skip-checks",
            action="store_true",
            help="Skip system checks.",
        )

    def execute(self, *args, **options):
        """Propagate --no-color to the request handler, then run normally."""
        if options["no_color"]:
            # We rely on the environment because it's currently the only
            # way to reach WSGIRequestHandler. This seems an acceptable
            # compromise considering `runserver` runs indefinitely.
            os.environ["DJANGO_COLORS"] = "nocolor"
        super().execute(*args, **options)

    def get_handler(self, *args, **options):
        """Return the default WSGI handler for the runner."""
        return get_internal_wsgi_application()

    def handle(self, *args, **options):
        """Parse and validate the addrport argument, then delegate to run()."""
        if not settings.DEBUG and not settings.ALLOWED_HOSTS:
            raise CommandError("You must set settings.ALLOWED_HOSTS if DEBUG is False.")

        self.use_ipv6 = options["use_ipv6"]
        if self.use_ipv6 and not socket.has_ipv6:
            raise CommandError("Your Python does not support IPv6.")
        self._raw_ipv6 = False
        if not options["addrport"]:
            self.addr = ""
            self.port = self.default_port
        else:
            m = re.match(naiveip_re, options["addrport"])
            if m is None:
                raise CommandError(
                    '"%s" is not a valid port number '
                    "or address:port pair." % options["addrport"]
                )
            self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
            if not self.port.isdigit():
                raise CommandError("%r is not a valid port number." % self.port)
            if self.addr:
                if _ipv6:
                    # Strip the surrounding [brackets] from a literal IPv6 addr.
                    self.addr = self.addr[1:-1]
                    self.use_ipv6 = True
                    self._raw_ipv6 = True
                elif self.use_ipv6 and not _fqdn:
                    raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
        if not self.addr:
            self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr
            self._raw_ipv6 = self.use_ipv6
        self.run(**options)

    def run(self, **options):
        """Run the server, using the autoreloader if needed."""
        use_reloader = options["use_reloader"]

        if use_reloader:
            autoreload.run_with_reloader(self.inner_run, **options)
        else:
            self.inner_run(None, **options)

    def inner_run(self, *args, **options):
        """Run system/migration checks (unless skipped) and serve requests."""
        # If an exception was silenced in ManagementUtility.execute in order
        # to be raised in the child process, raise it now.
        autoreload.raise_last_exception()

        threading = options["use_threading"]
        # 'shutdown_message' is a stealth option.
        shutdown_message = options.get("shutdown_message", "")

        if not options["skip_checks"]:
            self.stdout.write("Performing system checks...\n\n")
            self.check(display_num_errors=True)
        # Need to check migrations here, so can't use the
        # requires_migrations_check attribute.
        self.check_migrations()

        try:
            handler = self.get_handler(*args, **options)
            run(
                self.addr,
                int(self.port),
                handler,
                ipv6=self.use_ipv6,
                threading=threading,
                on_bind=self.on_bind,
                server_cls=self.server_cls,
            )
        except OSError as e:
            # Use helpful error messages instead of ugly tracebacks.
            ERRORS = {
                errno.EACCES: "You don't have permission to access that port.",
                errno.EADDRINUSE: "That port is already in use.",
                errno.EADDRNOTAVAIL: "That IP address can't be assigned to.",
            }
            try:
                error_text = ERRORS[e.errno]
            except KeyError:
                error_text = e
            self.stderr.write("Error: %s" % error_text)
            # Need to use an OS exit because sys.exit doesn't work in a thread
            os._exit(1)
        except KeyboardInterrupt:
            if shutdown_message:
                self.stdout.write(shutdown_message)
            sys.exit(0)

    def on_bind(self, server_port):
        """Print the startup banner once the server has bound to its port."""
        quit_command = "CTRL-BREAK" if sys.platform == "win32" else "CONTROL-C"

        if self._raw_ipv6:
            addr = f"[{self.addr}]"
        elif self.addr == "0":
            addr = "0.0.0.0"
        else:
            addr = self.addr

        now = datetime.now().strftime("%B %d, %Y - %X")
        version = self.get_version()
        print(
            f"{now}\n"
            f"Django version {version}, using settings {settings.SETTINGS_MODULE!r}\n"
            f"Starting development server at {self.protocol}://{addr}:{server_port}/\n"
            f"Quit the server with {quit_command}.",
            file=self.stdout,
        )
|
testbed/django__django/django/core/management/commands/shell.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import select
|
| 3 |
+
import sys
|
| 4 |
+
import traceback
|
| 5 |
+
|
| 6 |
+
from django.core.management import BaseCommand, CommandError
|
| 7 |
+
from django.utils.datastructures import OrderedSet
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Command(BaseCommand):
    help = (
        "Runs a Python interactive interpreter. Tries to use IPython or "
        "bpython, if one of them is available. Any standard input is executed "
        "as code."
    )

    requires_system_checks = []
    # Interpreter interfaces, tried in this order unless -i/--interface is set.
    shells = ["ipython", "bpython", "python"]

    def add_arguments(self, parser):
        """Register --no-startup, -i/--interface, and -c/--command."""
        parser.add_argument(
            "--no-startup",
            action="store_true",
            help=(
                "When using plain Python, ignore the PYTHONSTARTUP environment "
                "variable and ~/.pythonrc.py script."
            ),
        )
        parser.add_argument(
            "-i",
            "--interface",
            choices=self.shells,
            help=(
                "Specify an interactive interpreter interface. Available options: "
                '"ipython", "bpython", and "python"'
            ),
        )
        parser.add_argument(
            "-c",
            "--command",
            help=(
                "Instead of opening an interactive shell, run a command as Django and "
                "exit."
            ),
        )

    def ipython(self, options):
        """Start an IPython shell; the import raises ImportError if absent."""
        from IPython import start_ipython

        start_ipython(argv=[])

    def bpython(self, options):
        """Start a bpython shell; the import raises ImportError if absent."""
        import bpython

        bpython.embed()

    def python(self, options):
        """Start the plain built-in interactive interpreter."""
        import code

        # Set up a dictionary to serve as the environment for the shell.
        imported_objects = {}

        # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
        # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
        if not options["no_startup"]:
            for pythonrc in OrderedSet(
                [os.environ.get("PYTHONSTARTUP"), os.path.expanduser("~/.pythonrc.py")]
            ):
                if not pythonrc:
                    continue
                if not os.path.isfile(pythonrc):
                    continue
                with open(pythonrc) as handle:
                    pythonrc_code = handle.read()
                # Match the behavior of the cpython shell where an error in
                # PYTHONSTARTUP prints an exception and continues.
                try:
                    exec(compile(pythonrc_code, pythonrc, "exec"), imported_objects)
                except Exception:
                    traceback.print_exc()

        # By default, this will set up readline to do tab completion and to read and
        # write history to the .python_history file, but this can be overridden by
        # $PYTHONSTARTUP or ~/.pythonrc.py.
        try:
            hook = sys.__interactivehook__
        except AttributeError:
            # Match the behavior of the cpython shell where a missing
            # sys.__interactivehook__ is ignored.
            pass
        else:
            try:
                hook()
            except Exception:
                # Match the behavior of the cpython shell where an error in
                # sys.__interactivehook__ prints a warning and the exception
                # and continues.
                print("Failed calling sys.__interactivehook__")
                traceback.print_exc()

        # Set up tab completion for objects imported by $PYTHONSTARTUP or
        # ~/.pythonrc.py.
        try:
            import readline
            import rlcompleter

            readline.set_completer(rlcompleter.Completer(imported_objects).complete)
        except ImportError:
            pass

        # Start the interactive interpreter.
        code.interact(local=imported_objects)

    def handle(self, **options):
        """
        Dispatch, in order of precedence: run -c code, run piped stdin,
        or open the first importable interactive shell interface.
        """
        # Execute the command and exit.
        if options["command"]:
            exec(options["command"], globals())
            return

        # Execute stdin if it has anything to read and exit.
        # Not supported on Windows due to select.select() limitations.
        if (
            sys.platform != "win32"
            and not sys.stdin.isatty()
            and select.select([sys.stdin], [], [], 0)[0]
        ):
            exec(sys.stdin.read(), globals())
            return

        available_shells = (
            [options["interface"]] if options["interface"] else self.shells
        )

        for shell in available_shells:
            try:
                return getattr(self, shell)(options)
            except ImportError:
                pass
        # NOTE(review): `shell` here is the last interface attempted, not
        # necessarily the one the user asked for — confirm this is intended.
        raise CommandError("Couldn't import {} interface.".format(shell))
|
testbed/django__django/django/core/management/commands/showmigrations.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from django.apps import apps
|
| 4 |
+
from django.core.management.base import BaseCommand
|
| 5 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 6 |
+
from django.db.migrations.loader import MigrationLoader
|
| 7 |
+
from django.db.migrations.recorder import MigrationRecorder
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Command(BaseCommand):
    help = "Shows all available migrations for the current project"

    def add_arguments(self, parser):
        """Register app_label, --database, and the exclusive --list/--plan."""
        parser.add_argument(
            "app_label",
            nargs="*",
            help="App labels of applications to limit the output to.",
        )
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help=(
                "Nominates a database to show migrations for. Defaults to the "
                '"default" database.'
            ),
        )

        formats = parser.add_mutually_exclusive_group()
        formats.add_argument(
            "--list",
            "-l",
            action="store_const",
            dest="format",
            const="list",
            help=(
                "Shows a list of all migrations and which are applied. "
                "With a verbosity level of 2 or above, the applied datetimes "
                "will be included."
            ),
        )
        formats.add_argument(
            "--plan",
            "-p",
            action="store_const",
            dest="format",
            const="plan",
            help=(
                "Shows all migrations in the order they will be applied. With a "
                "verbosity level of 2 or above all direct migration dependencies and "
                "reverse dependencies (run_before) will be included."
            ),
        )

        parser.set_defaults(format="list")

    def handle(self, *args, **options):
        """Dispatch to show_plan() or show_list() based on the chosen format."""
        self.verbosity = options["verbosity"]

        # Get the database we're operating from
        db = options["database"]
        connection = connections[db]

        if options["format"] == "plan":
            return self.show_plan(connection, options["app_label"])
        else:
            return self.show_list(connection, options["app_label"])

    def _validate_app_names(self, loader, app_names):
        """Write errors and exit with status 2 if any app label is unknown."""
        has_bad_names = False
        for app_name in app_names:
            try:
                apps.get_app_config(app_name)
            except LookupError as err:
                self.stderr.write(str(err))
                has_bad_names = True
        if has_bad_names:
            sys.exit(2)

    def show_list(self, connection, app_names=None):
        """
        Show a list of all migrations on the system, or only those of
        some named apps.
        """
        # Load migrations from disk/DB
        loader = MigrationLoader(connection, ignore_no_migrations=True)
        recorder = MigrationRecorder(connection)
        recorded_migrations = recorder.applied_migrations()
        graph = loader.graph
        # If we were passed a list of apps, validate it
        if app_names:
            self._validate_app_names(loader, app_names)
        # Otherwise, show all apps in alphabetic order
        else:
            app_names = sorted(loader.migrated_apps)
        # For each app, print its migrations in order from oldest (roots) to
        # newest (leaves).
        for app_name in app_names:
            self.stdout.write(app_name, self.style.MIGRATE_LABEL)
            shown = set()
            for node in graph.leaf_nodes(app_name):
                for plan_node in graph.forwards_plan(node):
                    if plan_node not in shown and plan_node[0] == app_name:
                        # Give it a nice title if it's a squashed one
                        title = plan_node[1]
                        if graph.nodes[plan_node].replaces:
                            title += " (%s squashed migrations)" % len(
                                graph.nodes[plan_node].replaces
                            )
                        applied_migration = loader.applied_migrations.get(plan_node)
                        # Mark it as applied/unapplied
                        if applied_migration:
                            if plan_node in recorded_migrations:
                                output = " [X] %s" % title
                            else:
                                # Applied per the loader but not recorded in
                                # the database yet: flag it as half-done.
                                title += " Run 'manage.py migrate' to finish recording."
                                output = " [-] %s" % title
                            if self.verbosity >= 2 and hasattr(
                                applied_migration, "applied"
                            ):
                                output += (
                                    " (applied at %s)"
                                    % applied_migration.applied.strftime(
                                        "%Y-%m-%d %H:%M:%S"
                                    )
                                )
                            self.stdout.write(output)
                        else:
                            self.stdout.write(" [ ] %s" % title)
                        shown.add(plan_node)
            # If we didn't print anything, then a small message
            if not shown:
                self.stdout.write(" (no migrations)", self.style.ERROR)

    def show_plan(self, connection, app_names=None):
        """
        Show all known migrations (or only those of the specified app_names)
        in the order they will be applied.
        """
        # Load migrations from disk/DB
        loader = MigrationLoader(connection)
        graph = loader.graph
        if app_names:
            self._validate_app_names(loader, app_names)
            targets = [key for key in graph.leaf_nodes() if key[0] in app_names]
        else:
            targets = graph.leaf_nodes()
        plan = []
        seen = set()

        # Generate the plan
        for target in targets:
            for migration in graph.forwards_plan(target):
                if migration not in seen:
                    node = graph.node_map[migration]
                    plan.append(node)
                    seen.add(migration)

        # Output
        def print_deps(node):
            # Render the node's direct parents as "app.name" entries.
            out = []
            for parent in sorted(node.parents):
                out.append("%s.%s" % parent.key)
            if out:
                return " ... (%s)" % ", ".join(out)
            return ""

        for node in plan:
            deps = ""
            if self.verbosity >= 2:
                deps = print_deps(node)
            if node.key in loader.applied_migrations:
                self.stdout.write("[X]  %s.%s%s" % (node.key[0], node.key[1], deps))
            else:
                self.stdout.write("[ ]  %s.%s%s" % (node.key[0], node.key[1], deps))
        if not plan:
            self.stdout.write("(no migrations)", self.style.ERROR)
|
testbed/django__django/django/core/management/commands/sqlflush.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.management.base import BaseCommand
|
| 2 |
+
from django.core.management.sql import sql_flush
|
| 3 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Command(BaseCommand):
    """Print the SQL needed to flush the database back to its post-install state."""

    help = (
        "Returns a list of the SQL statements required to return all tables in "
        "the database to the state they were in just after they were installed."
    )

    # Wrap the emitted statements in BEGIN/COMMIT.
    output_transaction = True

    def add_arguments(self, parser):
        """Add the --database selector on top of the base options."""
        super().add_arguments(parser)
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help=(
                'Nominates a database to print the SQL for. Defaults to the "default" '
                "database."
            ),
        )

    def handle(self, **options):
        """Return the flush SQL for the chosen database as one string."""
        target_connection = connections[options["database"]]
        statements = sql_flush(self.style, target_connection)
        # Warn on stderr (not stdout) so piped SQL output stays clean.
        if not statements and options["verbosity"] >= 1:
            self.stderr.write("No tables found.")
        return "\n".join(statements)
|
testbed/django__django/django/core/management/commands/sqlmigrate.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.apps import apps
|
| 2 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 3 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 4 |
+
from django.db.migrations.loader import AmbiguityError, MigrationLoader
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class Command(BaseCommand):
    """Print the SQL that applying (or unapplying) one named migration would run."""

    help = "Prints the SQL statements for the named migration."

    output_transaction = True

    def add_arguments(self, parser):
        """Register the app/migration positionals plus database and direction flags."""
        parser.add_argument(
            "app_label", help="App label of the application containing the migration."
        )
        parser.add_argument(
            "migration_name", help="Migration name to print the SQL for."
        )
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help=(
                'Nominates a database to create SQL for. Defaults to the "default" '
                "database."
            ),
        )
        parser.add_argument(
            "--backwards",
            action="store_true",
            help="Creates SQL to unapply the migration, rather than to apply it",
        )

    def execute(self, *args, **options):
        # sqlmigrate doesn't support coloring its output but we need to force
        # no_color=True so that the BEGIN/COMMIT statements added by
        # output_transaction don't get colored either.
        options["no_color"] = True
        return super().execute(*args, **options)

    def handle(self, *args, **options):
        """Resolve the requested migration and return its SQL as one string."""
        # The database whose SQL dialect is generated.
        db_connection = connections[options["database"]]

        # Keep replaced migrations visible so any migration on disk can be
        # named, squashed or not.
        migration_loader = MigrationLoader(db_connection, replace_migrations=False)

        app_label = options["app_label"]
        migration_name = options["migration_name"]

        # Reject unknown apps and apps without migrations up front.
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        if app_label not in migration_loader.migrated_apps:
            raise CommandError("App '%s' does not have migrations" % app_label)

        # Resolve the (possibly abbreviated) migration name.
        try:
            migration = migration_loader.get_migration_by_prefix(
                app_label, migration_name
            )
        except AmbiguityError:
            raise CommandError(
                "More than one migration matches '%s' in app '%s'. Please be more "
                "specific." % (migration_name, app_label)
            )
        except KeyError:
            raise CommandError(
                "Cannot find a migration matching '%s' from app '%s'. Is it in "
                "INSTALLED_APPS?" % (migration_name, app_label)
            )

        # Only wrap output in BEGIN/COMMIT when the migration is atomic and
        # the backend can roll back DDL.
        self.output_transaction = (
            migration.atomic and db_connection.features.can_rollback_ddl
        )

        # A one-entry plan: just this migration, forwards or backwards.
        target = (app_label, migration.name)
        plan = [(migration_loader.graph.nodes[target], options["backwards"])]
        statements = migration_loader.collect_sql(plan)
        if not statements and options["verbosity"] >= 1:
            self.stderr.write("No operations found.")
        return "\n".join(statements)
|
testbed/django__django/django/core/management/commands/sqlsequencereset.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.management.base import AppCommand
|
| 2 |
+
from django.db import DEFAULT_DB_ALIAS, connections
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class Command(AppCommand):
    """Print the SQL that resets database sequences for the given app(s)."""

    help = (
        "Prints the SQL statements for resetting sequences for the given app name(s)."
    )

    output_transaction = True

    def add_arguments(self, parser):
        """Add the --database selector on top of AppCommand's options."""
        super().add_arguments(parser)
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help=(
                'Nominates a database to print the SQL for. Defaults to the "default" '
                "database."
            ),
        )

    def handle_app_config(self, app_config, **options):
        """Return sequence-reset SQL for one app, or nothing if it has no models."""
        # Apps without a models module have no sequences to reset.
        if app_config.models_module is None:
            return
        db_connection = connections[options["database"]]
        app_models = app_config.get_models(include_auto_created=True)
        sql_list = db_connection.ops.sequence_reset_sql(self.style, app_models)
        if not sql_list and options["verbosity"] >= 1:
            self.stderr.write("No sequences found.")
        return "\n".join(sql_list)
|
testbed/django__django/django/core/management/commands/squashmigrations.py
ADDED
|
@@ -0,0 +1,267 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
|
| 4 |
+
from django.apps import apps
|
| 5 |
+
from django.conf import settings
|
| 6 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 7 |
+
from django.core.management.utils import run_formatters
|
| 8 |
+
from django.db import DEFAULT_DB_ALIAS, connections, migrations
|
| 9 |
+
from django.db.migrations.loader import AmbiguityError, MigrationLoader
|
| 10 |
+
from django.db.migrations.migration import SwappableTuple
|
| 11 |
+
from django.db.migrations.optimizer import MigrationOptimizer
|
| 12 |
+
from django.db.migrations.writer import MigrationWriter
|
| 13 |
+
from django.utils.version import get_docs_version
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class Command(BaseCommand):
    help = (
        "Squashes an existing set of migrations (from first until specified) into a "
        "single new one."
    )

    def add_arguments(self, parser):
        """Register positional app/migration arguments and the squash options."""
        parser.add_argument(
            "app_label",
            help="App label of the application to squash migrations for.",
        )
        parser.add_argument(
            "start_migration_name",
            nargs="?",
            help=(
                "Migrations will be squashed starting from and including this "
                "migration."
            ),
        )
        parser.add_argument(
            "migration_name",
            help="Migrations will be squashed until and including this migration.",
        )
        parser.add_argument(
            "--no-optimize",
            action="store_true",
            help="Do not try to optimize the squashed operations.",
        )
        parser.add_argument(
            "--noinput",
            "--no-input",
            action="store_false",
            dest="interactive",
            help="Tells Django to NOT prompt the user for input of any kind.",
        )
        parser.add_argument(
            "--squashed-name",
            help="Sets the name of the new squashed migration.",
        )
        parser.add_argument(
            "--no-header",
            action="store_false",
            dest="include_header",
            help="Do not add a header comment to the new squashed migration.",
        )

    def handle(self, **options):
        """
        Collapse the app's migrations (optionally from a start migration) up
        to ``migration_name`` into a single new migration file on disk.

        Raises CommandError for unknown apps, unmigrated apps, unresolvable
        migration names, attempts to re-squash a squashed migration, or a
        name collision with an existing migration file.
        """
        self.verbosity = options["verbosity"]
        self.interactive = options["interactive"]
        app_label = options["app_label"]
        start_migration_name = options["start_migration_name"]
        migration_name = options["migration_name"]
        no_optimize = options["no_optimize"]
        squashed_name = options["squashed_name"]
        include_header = options["include_header"]
        # Validate app_label.
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        # Load the current graph state, check the app and migration they asked
        # for exists.
        loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
        if app_label not in loader.migrated_apps:
            raise CommandError(
                "App '%s' does not have migrations (so squashmigrations on "
                "it makes no sense)" % app_label
            )

        migration = self.find_migration(loader, app_label, migration_name)

        # Work out the list of predecessor migrations
        # (same-app ancestors of the target, in application order).
        migrations_to_squash = [
            loader.get_migration(al, mn)
            for al, mn in loader.graph.forwards_plan(
                (migration.app_label, migration.name)
            )
            if al == migration.app_label
        ]

        if start_migration_name:
            # Trim everything before the requested start migration.
            start_migration = self.find_migration(
                loader, app_label, start_migration_name
            )
            start = loader.get_migration(
                start_migration.app_label, start_migration.name
            )
            try:
                start_index = migrations_to_squash.index(start)
                migrations_to_squash = migrations_to_squash[start_index:]
            except ValueError:
                raise CommandError(
                    "The migration '%s' cannot be found. Maybe it comes after "
                    "the migration '%s'?\n"
                    "Have a look at:\n"
                    "  python manage.py showmigrations %s\n"
                    "to debug this issue." % (start_migration, migration, app_label)
                )

        # Tell them what we're doing and optionally ask if we should proceed
        if self.verbosity > 0 or self.interactive:
            self.stdout.write(
                self.style.MIGRATE_HEADING("Will squash the following migrations:")
            )
            for migration in migrations_to_squash:
                self.stdout.write(" - %s" % migration.name)

            if self.interactive:
                answer = None
                # Empty input defaults to "n"; otherwise keep only the first
                # letter, lowercased, and re-prompt until it's "y" or "n".
                while not answer or answer not in "yn":
                    answer = input("Do you wish to proceed? [yN] ")
                    if not answer:
                        answer = "n"
                        break
                    else:
                        answer = answer[0].lower()
                if answer != "y":
                    return

        # Load the operations from all those migrations and concat together,
        # along with collecting external dependencies and detecting
        # double-squashing
        operations = []
        dependencies = set()
        # We need to take all dependencies from the first migration in the list
        # as it may be 0002 depending on 0001
        first_migration = True
        for smigration in migrations_to_squash:
            if smigration.replaces:
                raise CommandError(
                    "You cannot squash squashed migrations! Please transition it to a "
                    "normal migration first: https://docs.djangoproject.com/en/%s/"
                    "topics/migrations/#squashing-migrations" % get_docs_version()
                )
            operations.extend(smigration.operations)
            for dependency in smigration.dependencies:
                if isinstance(dependency, SwappableTuple):
                    # Re-express a dependency on the swappable user model as a
                    # setting dependency so the squash stays swappable.
                    if settings.AUTH_USER_MODEL == dependency.setting:
                        dependencies.add(("__setting__", "AUTH_USER_MODEL"))
                    else:
                        dependencies.add(dependency)
                elif dependency[0] != smigration.app_label or first_migration:
                    dependencies.add(dependency)
            first_migration = False

        if no_optimize:
            if self.verbosity > 0:
                self.stdout.write(
                    self.style.MIGRATE_HEADING("(Skipping optimization.)")
                )
            new_operations = operations
        else:
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("Optimizing..."))

            optimizer = MigrationOptimizer()
            new_operations = optimizer.optimize(operations, migration.app_label)

            if self.verbosity > 0:
                if len(new_operations) == len(operations):
                    self.stdout.write(" No optimizations possible.")
                else:
                    self.stdout.write(
                        " Optimized from %s operations to %s operations."
                        % (len(operations), len(new_operations))
                    )

        # Work out the value of replaces (any squashed ones we're re-squashing)
        # need to feed their replaces into ours
        replaces = []
        for migration in migrations_to_squash:
            if migration.replaces:
                replaces.extend(migration.replaces)
            else:
                replaces.append((migration.app_label, migration.name))

        # Make a new migration with those operations
        subclass = type(
            "Migration",
            (migrations.Migration,),
            {
                "dependencies": dependencies,
                "operations": new_operations,
                "replaces": replaces,
            },
        )
        if start_migration_name:
            if squashed_name:
                # Use the name from --squashed-name.
                prefix, _ = start_migration.name.split("_", 1)
                name = "%s_%s" % (prefix, squashed_name)
            else:
                # Generate a name.
                name = "%s_squashed_%s" % (start_migration.name, migration.name)
            new_migration = subclass(name, app_label)
        else:
            # Squashing from the very beginning: the result is an initial
            # migration numbered 0001.
            name = "0001_%s" % (squashed_name or "squashed_%s" % migration.name)
            new_migration = subclass(name, app_label)
            new_migration.initial = True

        # Write out the new migration file
        writer = MigrationWriter(new_migration, include_header)
        if os.path.exists(writer.path):
            raise CommandError(
                f"Migration {new_migration.name} already exists. Use a different name."
            )
        with open(writer.path, "w", encoding="utf-8") as fh:
            fh.write(writer.as_string())
        run_formatters([writer.path])

        if self.verbosity > 0:
            self.stdout.write(
                self.style.MIGRATE_HEADING(
                    "Created new squashed migration %s" % writer.path
                )
                + "\n"
                "  You should commit this migration but leave the old ones in place;\n"
                "  the new migration will be used for new installs. Once you are sure\n"
                "  all instances of the codebase have applied the migrations you "
                "squashed,\n"
                "  you can delete them."
            )
            if writer.needs_manual_porting:
                self.stdout.write(
                    self.style.MIGRATE_HEADING("Manual porting required") + "\n"
                    "  Your migrations contained functions that must be manually "
                    "copied over,\n"
                    "  as we could not safely copy their implementation.\n"
                    "  See the comment at the top of the squashed migration for "
                    "details."
                )
                if shutil.which("black"):
                    self.stdout.write(
                        self.style.WARNING(
                            "Squashed migration couldn't be formatted using the "
                            '"black" command. You can call it manually.'
                        )
                    )

    def find_migration(self, loader, app_label, name):
        """
        Resolve a (possibly abbreviated) migration name to a Migration.

        Raise CommandError when the prefix is ambiguous or matches nothing.
        """
        try:
            return loader.get_migration_by_prefix(app_label, name)
        except AmbiguityError:
            raise CommandError(
                "More than one migration matches '%s' in app '%s'. Please be "
                "more specific." % (name, app_label)
            )
        except KeyError:
            raise CommandError(
                "Cannot find a migration matching '%s' from app '%s'."
                % (name, app_label)
            )
|
testbed/django__django/django/core/management/commands/startapp.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.management.templates import TemplateCommand
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class Command(TemplateCommand):
    """Scaffold a new Django app from the app layout template."""

    help = (
        "Creates a Django app directory structure for the given app name in "
        "the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide an application name."

    def handle(self, **options):
        # Pop the positionals so they aren't also passed as keyword options.
        name = options.pop("name")
        directory = options.pop("directory")
        super().handle("app", name, directory, **options)
|
testbed/django__django/django/core/management/commands/startproject.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.checks.security.base import SECRET_KEY_INSECURE_PREFIX
|
| 2 |
+
from django.core.management.templates import TemplateCommand
|
| 3 |
+
|
| 4 |
+
from ..utils import get_random_secret_key
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class Command(TemplateCommand):
    """Scaffold a new Django project from the project layout template."""

    help = (
        "Creates a Django project directory structure for the given project "
        "name in the current directory or optionally in the given directory."
    )
    missing_args_message = "You must provide a project name."

    def handle(self, **options):
        # Pop the positionals so they aren't also passed as keyword options.
        name = options.pop("name")
        directory = options.pop("directory")

        # Create a random SECRET_KEY to put it in the main settings.
        options["secret_key"] = SECRET_KEY_INSECURE_PREFIX + get_random_secret_key()

        super().handle("project", name, directory, **options)
|
testbed/django__django/django/core/management/commands/test.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from django.conf import settings
|
| 4 |
+
from django.core.management.base import BaseCommand
|
| 5 |
+
from django.core.management.utils import get_command_line_option
|
| 6 |
+
from django.test.runner import get_max_test_processes
|
| 7 |
+
from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Command(BaseCommand):
    help = "Discover and run tests in the specified modules or the current directory."

    # DiscoverRunner runs the checks after databases are set up.
    requires_system_checks = []
    # Dotted path of the runner class pre-parsed from argv in run_from_argv();
    # consulted by add_arguments() before normal option parsing happens.
    test_runner = None

    def run_from_argv(self, argv):
        """
        Pre-parse the command line to extract the value of the --testrunner
        option. This allows a test runner to define additional command line
        arguments.
        """
        self.test_runner = get_command_line_option(argv, "--testrunner")
        super().run_from_argv(argv)

    def add_arguments(self, parser):
        """Register test-label positionals, core flags, and runner-specific options."""
        parser.add_argument(
            "args",
            metavar="test_label",
            nargs="*",
            help=(
                "Module paths to test; can be modulename, modulename.TestCase or "
                "modulename.TestCase.test_method"
            ),
        )
        parser.add_argument(
            "--noinput",
            "--no-input",
            action="store_false",
            dest="interactive",
            help="Tells Django to NOT prompt the user for input of any kind.",
        )
        parser.add_argument(
            "--failfast",
            action="store_true",
            help="Tells Django to stop running the test suite after first failed test.",
        )
        parser.add_argument(
            "--testrunner",
            help="Tells Django to use specified test runner class instead of "
            "the one specified by the TEST_RUNNER setting.",
        )

        # self.test_runner was captured from raw argv in run_from_argv(), so
        # the chosen runner class can extend the parser with its own options.
        test_runner_class = get_runner(settings, self.test_runner)

        if hasattr(test_runner_class, "add_arguments"):
            test_runner_class.add_arguments(parser)

    def handle(self, *test_labels, **options):
        """Run the requested tests; exit with status 1 if any fail."""
        TestRunner = get_runner(settings, options["testrunner"])

        # Only track timings when the runner's --timing flag was given.
        time_keeper = TimeKeeper() if options.get("timing", False) else NullTimeKeeper()
        parallel = options.get("parallel")
        # Resolve "auto" to the number of usable worker processes.
        if parallel == "auto":
            options["parallel"] = get_max_test_processes()
        test_runner = TestRunner(**options)
        with time_keeper.timed("Total run"):
            failures = test_runner.run_tests(test_labels)
        time_keeper.print_results()
        if failures:
            sys.exit(1)
|
testbed/django__django/django/core/management/commands/testserver.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.management import call_command
|
| 2 |
+
from django.core.management.base import BaseCommand
|
| 3 |
+
from django.db import connection
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Command(BaseCommand):
    """Run the development server against a throwaway test database seeded with fixtures."""

    help = "Runs a development server with data from the given fixture(s)."

    requires_system_checks = []

    def add_arguments(self, parser):
        """Register fixture positionals plus server address/IPv6/input options."""
        parser.add_argument(
            "args",
            metavar="fixture",
            nargs="*",
            help="Path(s) to fixtures to load before running the server.",
        )
        parser.add_argument(
            "--noinput",
            "--no-input",
            action="store_false",
            dest="interactive",
            help="Tells Django to NOT prompt the user for input of any kind.",
        )
        parser.add_argument(
            "--addrport",
            default="",
            help="Port number or ipaddr:port to run the server on.",
        )
        parser.add_argument(
            "--ipv6",
            "-6",
            action="store_true",
            dest="use_ipv6",
            help="Tells Django to use an IPv6 address.",
        )

    def handle(self, *fixture_labels, **options):
        """Create a test DB, load the fixtures, then hand off to runserver."""
        verbosity = options["verbosity"]

        # Build the throwaway test database; clobber without asking when
        # --noinput was given.
        test_db_name = connection.creation.create_test_db(
            verbosity=verbosity,
            autoclobber=not options["interactive"],
            serialize=False,
        )

        # Populate it from the requested fixtures.
        call_command("loaddata", *fixture_labels, verbosity=verbosity)

        # Run the development server. Turn off auto-reloading because it causes
        # a strange error -- it causes this handle() method to be called
        # multiple times.
        shutdown_message = (
            "\nServer stopped.\nNote that the test database, %r, has not been "
            "deleted. You can explore it on your own." % test_db_name
        )
        call_command(
            "runserver",
            addrport=options["addrport"],
            shutdown_message=shutdown_message,
            use_reloader=False,
            use_ipv6=options["use_ipv6"],
            use_threading=connection.features.test_db_allows_multiple_connections,
        )
|
testbed/django__django/django/core/management/sql.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from django.apps import apps
|
| 4 |
+
from django.db import models
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def sql_flush(style, connection, reset_sequences=True, allow_cascade=False):
    """
    Return a list of the SQL statements used to flush the database.

    Only tables that Django knows about and that actually exist are flushed;
    views are excluded.
    """
    django_tables = connection.introspection.django_table_names(
        only_existing=True, include_views=False
    )
    return connection.ops.sql_flush(
        style,
        django_tables,
        reset_sequences=reset_sequences,
        allow_cascade=allow_cascade,
    )
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _emit_migrate_signal(signal, phase, verbosity, interactive, db, **kwargs):
    """
    Send ``signal`` once for every installed app that has a models module.

    ``phase`` is "pre" or "post" and is used only in the verbose log line; the
    signal receives the app config as both sender and ``app_config``, plus the
    verbosity/interactive/using context and any extra ``kwargs`` (which may
    include a ``stdout`` override for the verbose message).
    """
    for app_config in apps.get_app_configs():
        # Apps without models never receive migrate signals.
        if app_config.models_module is None:
            continue
        if verbosity >= 2:
            stdout = kwargs.get("stdout", sys.stdout)
            stdout.write(
                "Running %s-migrate handlers for application %s"
                % (phase, app_config.label)
            )
        signal.send(
            sender=app_config,
            app_config=app_config,
            verbosity=verbosity,
            interactive=interactive,
            using=db,
            **kwargs,
        )


def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs):
    # Emit the pre_migrate signal for every application.
    _emit_migrate_signal(
        models.signals.pre_migrate, "pre", verbosity, interactive, db, **kwargs
    )


def emit_post_migrate_signal(verbosity, interactive, db, **kwargs):
    # Emit the post_migrate signal for every application.
    _emit_migrate_signal(
        models.signals.post_migrate, "post", verbosity, interactive, db, **kwargs
    )
|
testbed/django__django/django/core/management/templates.py
ADDED
|
@@ -0,0 +1,406 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
import mimetypes
|
| 3 |
+
import os
|
| 4 |
+
import posixpath
|
| 5 |
+
import shutil
|
| 6 |
+
import stat
|
| 7 |
+
import tempfile
|
| 8 |
+
from importlib import import_module
|
| 9 |
+
from urllib.request import build_opener
|
| 10 |
+
|
| 11 |
+
import django
|
| 12 |
+
from django.conf import settings
|
| 13 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 14 |
+
from django.core.management.utils import (
|
| 15 |
+
find_formatters,
|
| 16 |
+
handle_extensions,
|
| 17 |
+
run_formatters,
|
| 18 |
+
)
|
| 19 |
+
from django.template import Context, Engine
|
| 20 |
+
from django.utils import archive
|
| 21 |
+
from django.utils.http import parse_header_parameters
|
| 22 |
+
from django.utils.version import get_docs_version
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class TemplateCommand(BaseCommand):
    """
    Copy either a Django application layout template or a Django project
    layout template into the specified directory.

    :param style: A color style object (see django.core.management.color).
    :param app_or_project: The string 'app' or 'project'.
    :param name: The name of the application or project.
    :param directory: The directory to which the template should be copied.
    :param options: The additional variables passed to project or app templates
    """

    requires_system_checks = []
    # The supported URL schemes
    url_schemes = ["http", "https", "ftp"]
    # Rewrite the following suffixes when determining the target filename.
    rewrite_template_suffixes = (
        # Allow shipping invalid .py files without byte-compilation.
        (".py-tpl", ".py"),
    )

    def add_arguments(self, parser):
        """Register the template-related command line options."""
        parser.add_argument("name", help="Name of the application or project.")
        parser.add_argument(
            "directory", nargs="?", help="Optional destination directory"
        )
        parser.add_argument(
            "--template", help="The path or URL to load the template from."
        )
        parser.add_argument(
            "--extension",
            "-e",
            dest="extensions",
            action="append",
            default=["py"],
            help='The file extension(s) to render (default: "py"). '
            "Separate multiple extensions with commas, or use "
            "-e multiple times.",
        )
        parser.add_argument(
            "--name",
            "-n",
            dest="files",
            action="append",
            default=[],
            help="The file name(s) to render. Separate multiple file names "
            "with commas, or use -n multiple times.",
        )
        parser.add_argument(
            "--exclude",
            "-x",
            action="append",
            # SUPPRESS keeps "exclude" out of options entirely when the flag
            # isn't passed; handle() relies on its absence to apply the
            # default hidden-directory filtering instead.
            default=argparse.SUPPRESS,
            nargs="?",
            const="",
            help=(
                "The directory name(s) to exclude, in addition to .git and "
                "__pycache__. Can be used multiple times."
            ),
        )

    def handle(self, app_or_project, name, target=None, **options):
        """
        Render the app/project template into the target directory.

        Creates the destination, locates the template source (local dir,
        archive, or URL), renders matching files through the Django template
        engine, copies the rest verbatim, then cleans up temporary downloads
        and runs code formatters on the result.
        """
        self.app_or_project = app_or_project
        self.a_or_an = "an" if app_or_project == "app" else "a"
        # Temporary downloads/extractions collected here are removed at the end.
        self.paths_to_remove = []
        self.verbosity = options["verbosity"]

        self.validate_name(name)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = os.path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except FileExistsError:
                raise CommandError("'%s' already exists" % top_dir)
            except OSError as e:
                raise CommandError(e)
        else:
            top_dir = os.path.abspath(os.path.expanduser(target))
            if app_or_project == "app":
                self.validate_name(os.path.basename(top_dir), "directory")
            if not os.path.exists(top_dir):
                raise CommandError(
                    "Destination directory '%s' does not "
                    "exist, please create it first." % top_dir
                )

        # Find formatters, which are external executables, before input
        # from the templates can sneak into the path.
        formatter_paths = find_formatters()

        extensions = tuple(handle_extensions(options["extensions"]))
        extra_files = []
        excluded_directories = [".git", "__pycache__"]
        for file in options["files"]:
            extra_files.extend(map(lambda x: x.strip(), file.split(",")))
        if exclude := options.get("exclude"):
            for directory in exclude:
                excluded_directories.append(directory.strip())
        if self.verbosity >= 2:
            self.stdout.write(
                "Rendering %s template files with extensions: %s"
                % (app_or_project, ", ".join(extensions))
            )
            self.stdout.write(
                "Rendering %s template files with filenames: %s"
                % (app_or_project, ", ".join(extra_files))
            )
        base_name = "%s_name" % app_or_project
        base_subdir = "%s_template" % app_or_project
        base_directory = "%s_directory" % app_or_project
        camel_case_name = "camel_case_%s_name" % app_or_project
        camel_case_value = "".join(x for x in name.title() if x != "_")

        context = Context(
            {
                **options,
                base_name: name,
                base_directory: top_dir,
                camel_case_name: camel_case_value,
                "docs_version": get_docs_version(),
                "django_version": django.__version__,
            },
            autoescape=False,
        )

        # Setup a stub settings environment for template rendering
        if not settings.configured:
            settings.configure()
            django.setup()

        template_dir = self.handle_template(options["template"], base_subdir)
        # +1 strips the path separator that follows template_dir in root.
        prefix_length = len(template_dir) + 1

        for root, dirs, files in os.walk(template_dir):
            path_rest = root[prefix_length:]
            relative_dir = path_rest.replace(base_name, name)
            if relative_dir:
                target_dir = os.path.join(top_dir, relative_dir)
                os.makedirs(target_dir, exist_ok=True)

            # Prune directories in place (dirs[:] copy) so os.walk skips them.
            for dirname in dirs[:]:
                if "exclude" not in options:
                    # Default behavior: skip hidden dirs and bytecode caches.
                    if dirname.startswith(".") or dirname == "__pycache__":
                        dirs.remove(dirname)
                elif dirname in excluded_directories:
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith((".pyo", ".pyc", ".py.class")):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = os.path.join(root, filename)
                new_path = os.path.join(
                    top_dir, relative_dir, filename.replace(base_name, name)
                )
                for old_suffix, new_suffix in self.rewrite_template_suffixes:
                    if new_path.endswith(old_suffix):
                        new_path = new_path.removesuffix(old_suffix) + new_suffix
                        break  # Only rewrite once

                if os.path.exists(new_path):
                    raise CommandError(
                        "%s already exists. Overlaying %s %s into an existing "
                        "directory won't replace conflicting files."
                        % (
                            new_path,
                            self.a_or_an,
                            app_or_project,
                        )
                    )

                # Only render the Python files, as we don't want to
                # accidentally render Django templates files
                if new_path.endswith(extensions) or filename in extra_files:
                    with open(old_path, encoding="utf-8") as template_file:
                        content = template_file.read()
                    template = Engine().from_string(content)
                    content = template.render(context)
                    with open(new_path, "w", encoding="utf-8") as new_file:
                        new_file.write(content)
                else:
                    shutil.copyfile(old_path, new_path)

                if self.verbosity >= 2:
                    self.stdout.write("Creating %s" % new_path)
                try:
                    self.apply_umask(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path,
                        self.style.NOTICE,
                    )

        if self.paths_to_remove:
            if self.verbosity >= 2:
                self.stdout.write("Cleaning up temporary files.")
            for path_to_remove in self.paths_to_remove:
                if os.path.isfile(path_to_remove):
                    os.remove(path_to_remove)
                else:
                    shutil.rmtree(path_to_remove)

        run_formatters([top_dir], **formatter_paths)

    def handle_template(self, template, subdir):
        """
        Determine where the app or project templates are.
        Use django.__path__[0] as the default because the Django install
        directory isn't known.
        """
        if template is None:
            return os.path.join(django.__path__[0], "conf", subdir)
        else:
            template = template.removeprefix("file://")
            expanded_template = os.path.expanduser(template)
            expanded_template = os.path.normpath(expanded_template)
            if os.path.isdir(expanded_template):
                return expanded_template
            if self.is_url(template):
                # downloads the file and returns the path
                absolute_path = self.download(template)
            else:
                absolute_path = os.path.abspath(expanded_template)
            if os.path.exists(absolute_path):
                return self.extract(absolute_path)

        raise CommandError(
            "couldn't handle %s template %s." % (self.app_or_project, template)
        )

    def validate_name(self, name, name_or_dir="name"):
        """
        Raise CommandError unless *name* is a usable app/project identifier:
        non-None, a valid Python identifier, and not shadowing an importable
        module.
        """
        if name is None:
            raise CommandError(
                "you must provide {an} {app} name".format(
                    an=self.a_or_an,
                    app=self.app_or_project,
                )
            )
        # Check it's a valid directory name.
        if not name.isidentifier():
            raise CommandError(
                "'{name}' is not a valid {app} {type}. Please make sure the "
                "{type} is a valid identifier.".format(
                    name=name,
                    app=self.app_or_project,
                    type=name_or_dir,
                )
            )
        # Check it cannot be imported.
        try:
            import_module(name)
        except ImportError:
            pass
        else:
            raise CommandError(
                "'{name}' conflicts with the name of an existing Python "
                "module and cannot be used as {an} {app} {type}. Please try "
                "another {type}.".format(
                    name=name,
                    an=self.a_or_an,
                    app=self.app_or_project,
                )
            )

    def download(self, url):
        """
        Download the given URL and return the file name.
        """

        def cleanup_url(url):
            # Split the URL into the trailing path component (used as the
            # local filename) and a tidy URL for display purposes.
            tmp = url.rstrip("/")
            filename = tmp.split("/")[-1]
            if url.endswith("/"):
                display_url = tmp + "/"
            else:
                display_url = url
            return filename, display_url

        prefix = "django_%s_template_" % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix="_download")
        # Register the download dir so handle() removes it afterwards.
        self.paths_to_remove.append(tempdir)
        filename, display_url = cleanup_url(url)

        if self.verbosity >= 2:
            self.stdout.write("Downloading %s" % display_url)

        the_path = os.path.join(tempdir, filename)
        opener = build_opener()
        opener.addheaders = [("User-Agent", f"Django/{django.__version__}")]
        try:
            with opener.open(url) as source, open(the_path, "wb") as target:
                headers = source.info()
                target.write(source.read())
        except OSError as e:
            raise CommandError(
                "couldn't download URL %s to %s: %s" % (url, filename, e)
            )

        used_name = the_path.split("/")[-1]

        # Trying to get better name from response headers
        content_disposition = headers["content-disposition"]
        if content_disposition:
            _, params = parse_header_parameters(content_disposition)
            guessed_filename = params.get("filename") or used_name
        else:
            guessed_filename = used_name

        # Falling back to content type guessing
        ext = self.splitext(guessed_filename)[1]
        content_type = headers["content-type"]
        if not ext and content_type:
            ext = mimetypes.guess_extension(content_type)
            if ext:
                guessed_filename += ext

        # Move the temporary file to a filename that has better
        # chances of being recognized by the archive utils
        if used_name != guessed_filename:
            guessed_path = os.path.join(tempdir, guessed_filename)
            shutil.move(the_path, guessed_path)
            return guessed_path

        # Giving up
        return the_path

    def splitext(self, the_path):
        """
        Like os.path.splitext, but takes off .tar, too
        """
        base, ext = posixpath.splitext(the_path)
        if base.lower().endswith(".tar"):
            # Fold ".tar" into the extension so e.g. "x.tar.gz" -> ".tar.gz".
            ext = base[-4:] + ext
            base = base[:-4]
        return base, ext

    def extract(self, filename):
        """
        Extract the given file to a temporary directory and return
        the path of the directory with the extracted content.
        """
        prefix = "django_%s_template_" % self.app_or_project
        tempdir = tempfile.mkdtemp(prefix=prefix, suffix="_extract")
        # Register the extraction dir so handle() removes it afterwards.
        self.paths_to_remove.append(tempdir)
        if self.verbosity >= 2:
            self.stdout.write("Extracting %s" % filename)
        try:
            archive.extract(filename, tempdir)
            return tempdir
        except (archive.ArchiveException, OSError) as e:
            raise CommandError(
                "couldn't extract file %s to %s: %s" % (filename, tempdir, e)
            )

    def is_url(self, template):
        """Return True if the name looks like a URL."""
        if ":" not in template:
            return False
        scheme = template.split(":", 1)[0].lower()
        return scheme in self.url_schemes

    def apply_umask(self, old_path, new_path):
        """Apply the process umask to old_path's mode and set it on new_path."""
        # os.umask() sets AND returns; call twice to read it without changing it.
        current_umask = os.umask(0)
        os.umask(current_umask)
        current_mode = stat.S_IMODE(os.stat(old_path).st_mode)
        os.chmod(new_path, current_mode & ~current_umask)

    def make_writeable(self, filename):
        """
        Make sure that the file is writeable.
        Useful if our source is read-only.
        """
        if not os.access(filename, os.W_OK):
            st = os.stat(filename)
            new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
            os.chmod(filename, new_permissions)
|
testbed/django__django/django/core/management/utils.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import fnmatch
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
import subprocess
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from subprocess import run
|
| 7 |
+
|
| 8 |
+
from django.apps import apps as installed_apps
|
| 9 |
+
from django.utils.crypto import get_random_string
|
| 10 |
+
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
|
| 11 |
+
|
| 12 |
+
from .base import CommandError, CommandParser
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def popen_wrapper(args, stdout_encoding="utf-8"):
    """
    Friendly wrapper around Popen.

    Return stdout output, stderr output, and OS status code.
    """
    try:
        # close_fds is unsupported combined with redirected std handles on
        # Windows, hence the os.name check.
        completed = run(args, capture_output=True, close_fds=os.name != "nt")
    except OSError as err:
        raise CommandError("Error executing %s" % args[0]) from err
    stdout = completed.stdout.decode(stdout_encoding)
    stderr = completed.stderr.decode(DEFAULT_LOCALE_ENCODING, errors="replace")
    return stdout, stderr, completed.returncode
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def handle_extensions(extensions):
    """
    Organize multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
    {'.html', '.js', '.py'}
    >>> handle_extensions(['.html, txt,.tpl'])
    {'.html', '.tpl', '.txt'}
    """
    # Flatten comma-separated entries, dropping any embedded whitespace.
    pieces = (
        piece
        for entry in extensions
        for piece in entry.replace(" ", "").split(",")
    )
    # Normalize each piece to a leading-dot form; the set removes duplicates.
    return {piece if piece.startswith(".") else "." + piece for piece in pieces}
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def find_command(cmd, path=None, pathext=None):
    """
    Return the full path of *cmd* by searching *path*, or None.

    Windows-style executable suffixes (PATHEXT) are probed as well, unless
    *cmd* already ends with one of them.
    """
    if path is None:
        path = os.environ.get("PATH", "").split(os.pathsep)
    if isinstance(path, str):
        path = [path]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get("PATHEXT", ".COM;.EXE;.BAT;.CMD").split(os.pathsep)
    # don't use extensions if the command ends with one of them
    if any(cmd.endswith(ext) for ext in pathext):
        pathext = [""]
    # check if we find the command on PATH
    for directory in path:
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate):
            return candidate
        for ext in pathext:
            candidate_ext = candidate + ext
            if os.path.isfile(candidate_ext):
                return candidate_ext
    return None
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def get_random_secret_key():
    """
    Return a 50 character random string usable as a SECRET_KEY setting value.
    """
    # Lowercase letters, digits, and punctuation — no quotes or backslashes,
    # so the value can be pasted into a settings file verbatim.
    allowed_chars = "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)"
    return get_random_string(50, allowed_chars)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def parse_apps_and_model_labels(labels):
    """
    Parse a list of "app_label.ModelName" or "app_label" strings into actual
    objects and return a two-element tuple:
        (set of model classes, set of app_configs).
    Raise a CommandError if some specified models or apps don't exist.
    """
    models = set()
    apps = set()

    for label in labels:
        # A dot marks a fully qualified model label; otherwise it's an app.
        if "." not in label:
            try:
                apps.add(installed_apps.get_app_config(label))
            except LookupError as e:
                raise CommandError(str(e))
            continue
        try:
            models.add(installed_apps.get_model(label))
        except LookupError:
            raise CommandError("Unknown model: %s" % label)

    return models, apps
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def get_command_line_option(argv, option):
    """
    Return the value of a command line option (which should include leading
    dashes, e.g. '--testrunner') from an argument list. Return None if the
    option wasn't passed or if the argument list couldn't be parsed.
    """
    parser = CommandParser(add_help=False, allow_abbrev=False)
    parser.add_argument(option, dest="value")
    try:
        # argv[0] is the program and argv[1] the subcommand; skip both.
        parsed, _ = parser.parse_known_args(argv[2:])
    except CommandError:
        return None
    return parsed.value
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def normalize_path_patterns(patterns):
    """Normalize an iterable of glob style patterns based on OS."""
    # Both "/" and the native separator may appear in user-supplied patterns.
    dir_suffixes = {"%s*" % sep for sep in {"/", os.sep}}

    def strip_dir_suffix(pattern):
        # Drop a trailing "<sep>*" so "dir/*" matches like "dir".
        for suffix in dir_suffixes:
            if pattern.endswith(suffix):
                return pattern.removesuffix(suffix)
        return pattern

    return [strip_dir_suffix(os.path.normcase(p)) for p in patterns]
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def is_ignored_path(path, ignore_patterns):
    """
    Check if the given path should be ignored or not based on matching
    one of the glob style `ignore_patterns`.
    """
    path = Path(path)
    # A pattern may match either the final component or the whole path.
    candidates = (path.name, str(path))
    return any(
        fnmatch.fnmatchcase(candidate, pattern)
        for pattern in normalize_path_patterns(ignore_patterns)
        for candidate in candidates
    )
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def find_formatters():
    """Return the discovered paths of supported code formatters (black)."""
    return dict(black_path=shutil.which("black"))
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def run_formatters(written_files, black_path=(sentinel := object())):
    """
    Run the black formatter on the specified files.
    """
    # Use a sentinel rather than None, as which() returns None when not found.
    if black_path is sentinel:
        black_path = shutil.which("black")
    if not black_path:
        return
    subprocess.run(
        [black_path, "--fast", "--", *written_files],
        capture_output=True,
    )
|
testbed/django__django/django/core/paginator.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections.abc
|
| 2 |
+
import inspect
|
| 3 |
+
import warnings
|
| 4 |
+
from math import ceil
|
| 5 |
+
|
| 6 |
+
from django.utils.functional import cached_property
|
| 7 |
+
from django.utils.inspect import method_has_no_args
|
| 8 |
+
from django.utils.translation import gettext_lazy as _
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class UnorderedObjectListWarning(RuntimeWarning):
    """Warning issued when paginating an object_list without an ordering."""

    pass
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class InvalidPage(Exception):
    """Base exception for invalid page numbers."""

    pass
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class PageNotAnInteger(InvalidPage):
    """Raised when the requested page number can't be converted to an int."""

    pass
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class EmptyPage(InvalidPage):
    """Raised when the requested page number is out of the valid range."""

    pass
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Paginator:
    """Split an object_list into pages of ``per_page`` items each."""

    # Translators: String used to replace omitted page numbers in elided page
    # range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
    ELLIPSIS = _("…")
    # Default messages attached to the InvalidPage subclasses raised by
    # validate_number(); overridable per instance via error_messages.
    default_error_messages = {
        "invalid_page": _("That page number is not an integer"),
        "min_page": _("That page number is less than 1"),
        "no_results": _("That page contains no results"),
    }

    def __init__(
        self,
        object_list,
        per_page,
        orphans=0,
        allow_empty_first_page=True,
        error_messages=None,
    ):
        self.object_list = object_list
        self._check_object_list_is_ordered()
        self.per_page = int(per_page)
        self.orphans = int(orphans)
        self.allow_empty_first_page = allow_empty_first_page
        # Merge custom messages over the defaults; custom entries win.
        self.error_messages = (
            self.default_error_messages
            if error_messages is None
            else self.default_error_messages | error_messages
        )

    def __iter__(self):
        """Yield each Page object in order."""
        for page_number in self.page_range:
            yield self.page(page_number)

    def validate_number(self, number):
        """Validate the given 1-based page number."""
        try:
            # Reject non-integral floats (e.g. 2.5) before int() truncates them.
            if isinstance(number, float) and not number.is_integer():
                raise ValueError
            number = int(number)
        except (TypeError, ValueError):
            raise PageNotAnInteger(self.error_messages["invalid_page"])
        if number < 1:
            raise EmptyPage(self.error_messages["min_page"])
        if number > self.num_pages:
            raise EmptyPage(self.error_messages["no_results"])
        return number

    def get_page(self, number):
        """
        Return a valid page, even if the page argument isn't a number or isn't
        in range.
        """
        try:
            number = self.validate_number(number)
        except PageNotAnInteger:
            number = 1
        except EmptyPage:
            number = self.num_pages
        return self.page(number)

    def page(self, number):
        """Return a Page object for the given 1-based page number."""
        number = self.validate_number(number)
        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        # Fold trailing orphans into the last page instead of a short page.
        if top + self.orphans >= self.count:
            top = self.count
        return self._get_page(self.object_list[bottom:top], number, self)

    def _get_page(self, *args, **kwargs):
        """
        Return an instance of a single page.

        This hook can be used by subclasses to use an alternative to the
        standard :cls:`Page` object.
        """
        return Page(*args, **kwargs)

    @cached_property
    def count(self):
        """Return the total number of objects, across all pages."""
        # Prefer an object_list-provided count() (e.g. a QuerySet) over len(),
        # but only if it takes no arguments (so str.count etc. is skipped).
        c = getattr(self.object_list, "count", None)
        if callable(c) and not inspect.isbuiltin(c) and method_has_no_args(c):
            return c()
        return len(self.object_list)

    @cached_property
    def num_pages(self):
        """Return the total number of pages."""
        if self.count == 0 and not self.allow_empty_first_page:
            return 0
        hits = max(1, self.count - self.orphans)
        return ceil(hits / self.per_page)

    @property
    def page_range(self):
        """
        Return a 1-based range of pages for iterating through within
        a template for loop.
        """
        return range(1, self.num_pages + 1)

    def _check_object_list_is_ordered(self):
        """
        Warn if self.object_list is unordered (typically a QuerySet).
        """
        ordered = getattr(self.object_list, "ordered", None)
        if ordered is not None and not ordered:
            obj_list_repr = (
                "{} {}".format(
                    self.object_list.model, self.object_list.__class__.__name__
                )
                if hasattr(self.object_list, "model")
                else "{!r}".format(self.object_list)
            )
            warnings.warn(
                "Pagination may yield inconsistent results with an unordered "
                "object_list: {}.".format(obj_list_repr),
                UnorderedObjectListWarning,
                stacklevel=3,
            )

    def get_elided_page_range(self, number=1, *, on_each_side=3, on_ends=2):
        """
        Return a 1-based range of pages with some values elided.

        If the page range is larger than a given size, the whole range is not
        provided and a compact form is returned instead, e.g. for a paginator
        with 50 pages, if page 43 were the current page, the output, with the
        default arguments, would be:

            1, 2, …, 40, 41, 42, 43, 44, 45, 46, …, 49, 50.
        """
        number = self.validate_number(number)

        # Small enough to show every page without elision.
        if self.num_pages <= (on_each_side + on_ends) * 2:
            yield from self.page_range
            return

        # Left side: elide only if there is a real gap to hide.
        if number > (1 + on_each_side + on_ends) + 1:
            yield from range(1, on_ends + 1)
            yield self.ELLIPSIS
            yield from range(number - on_each_side, number + 1)
        else:
            yield from range(1, number + 1)

        # Right side: same logic mirrored around the current page.
        if number < (self.num_pages - on_each_side - on_ends) - 1:
            yield from range(number + 1, number + on_each_side + 1)
            yield self.ELLIPSIS
            yield from range(self.num_pages - on_ends + 1, self.num_pages + 1)
        else:
            yield from range(number + 1, self.num_pages + 1)
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class Page(collections.abc.Sequence):
    """A single page of results handed out by a Paginator."""

    def __init__(self, object_list, number, paginator):
        self.object_list = object_list
        self.number = number
        self.paginator = paginator

    def __repr__(self):
        return "<Page %s of %s>" % (self.number, self.paginator.num_pages)

    def __len__(self):
        return len(self.object_list)

    def __getitem__(self, index):
        if not isinstance(index, (int, slice)):
            raise TypeError(
                "Page indices must be integers or slices, not %s."
                % type(index).__name__
            )
        # Materialize the object_list once so that, if it was a QuerySet,
        # later __getitem__ calls are plain list lookups, not database hits.
        if not isinstance(self.object_list, list):
            self.object_list = list(self.object_list)
        return self.object_list[index]

    def has_next(self):
        """True when at least one page follows this one."""
        return self.paginator.num_pages > self.number

    def has_previous(self):
        """True when at least one page precedes this one."""
        return self.number > 1

    def has_other_pages(self):
        """True when this is not the only page."""
        return self.has_previous() or self.has_next()

    def next_page_number(self):
        """Number of the following page, validated by the paginator."""
        return self.paginator.validate_number(self.number + 1)

    def previous_page_number(self):
        """Number of the preceding page, validated by the paginator."""
        return self.paginator.validate_number(self.number - 1)

    def start_index(self):
        """
        Return the 1-based index of the first object on this page,
        relative to total objects in the paginator.
        """
        # An empty paginator has no first object: report zero.
        if self.paginator.count == 0:
            return 0
        return self.paginator.per_page * (self.number - 1) + 1

    def end_index(self):
        """
        Return the 1-based index of the last object on this page,
        relative to total objects found (hits).
        """
        # The final page can be short (orphans), so clamp to the total count.
        if self.number == self.paginator.num_pages:
            return self.paginator.count
        return self.number * self.paginator.per_page
|
testbed/django__django/django/core/serializers/__init__.py
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Interfaces for serializing Django objects.
|
| 3 |
+
|
| 4 |
+
Usage::
|
| 5 |
+
|
| 6 |
+
from django.core import serializers
|
| 7 |
+
json = serializers.serialize("json", some_queryset)
|
| 8 |
+
objects = list(serializers.deserialize("json", json))
|
| 9 |
+
|
| 10 |
+
To add your own serializers, use the SERIALIZATION_MODULES setting::
|
| 11 |
+
|
| 12 |
+
SERIALIZATION_MODULES = {
|
| 13 |
+
"csv": "path.to.csv.serializer",
|
| 14 |
+
"txt": "path.to.txt.serializer",
|
| 15 |
+
}
|
| 16 |
+
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import importlib
|
| 20 |
+
|
| 21 |
+
from django.apps import apps
|
| 22 |
+
from django.conf import settings
|
| 23 |
+
from django.core.serializers.base import SerializerDoesNotExist
|
| 24 |
+
|
| 25 |
+
# Built-in serializers
|
| 26 |
+
BUILTIN_SERIALIZERS = {
|
| 27 |
+
"xml": "django.core.serializers.xml_serializer",
|
| 28 |
+
"python": "django.core.serializers.python",
|
| 29 |
+
"json": "django.core.serializers.json",
|
| 30 |
+
"yaml": "django.core.serializers.pyyaml",
|
| 31 |
+
"jsonl": "django.core.serializers.jsonl",
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
_serializers = {}
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class BadSerializer:
    """
    Stub serializer that stores an exception raised during registration.

    Registration caches serializer modules; when building one fails, the
    failure is cached in this stub and re-raised only when the serializer
    is actually used, so one broken format doesn't break the registry.
    """

    # Stubs are never internal-use-only; they must surface the error.
    internal_use_only = False

    def __init__(self, exception):
        self.exception = exception

    def __call__(self, *args, **kwargs):
        # Re-raise the original registration failure at use time.
        raise self.exception
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def register_serializer(format, serializer_module, serializers=None):
    """Register a new serializer.

    ``serializer_module`` should be the fully qualified module name
    for the serializer.

    If ``serializers`` is provided, the registration will be added
    to the provided dictionary.

    If ``serializers`` is not provided, the registration will be made
    directly into the global register of serializers. Adding serializers
    directly is not a thread-safe operation.
    """
    if serializers is None and not _serializers:
        _load_serializers()

    try:
        module = importlib.import_module(serializer_module)
    except ImportError as exc:
        # Cache the failure: BadSerializer re-raises it on first use, so a
        # broken registration surfaces only when that format is requested.
        failing = BadSerializer(exc)
        module = type(
            "BadSerializerModule",
            (),
            {"Deserializer": failing, "Serializer": failing},
        )

    target = _serializers if serializers is None else serializers
    target[format] = module
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def unregister_serializer(format):
    "Unregister a given serializer. This is not a thread-safe operation."
    if not _serializers:
        _load_serializers()
    if format in _serializers:
        del _serializers[format]
    else:
        raise SerializerDoesNotExist(format)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def get_serializer(format):
    """Return the Serializer class registered under ``format``."""
    if not _serializers:
        _load_serializers()
    if format in _serializers:
        return _serializers[format].Serializer
    raise SerializerDoesNotExist(format)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def get_serializer_formats():
    """Return the names of every registered serialization format."""
    if not _serializers:
        _load_serializers()
    return [*_serializers]
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def get_public_serializer_formats():
    """Return formats whose serializers are not for internal Django use."""
    if not _serializers:
        _load_serializers()
    return [
        format
        for format, module in _serializers.items()
        if not module.Serializer.internal_use_only
    ]
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def get_deserializer(format):
    """Return the Deserializer callable registered under ``format``."""
    if not _serializers:
        _load_serializers()
    if format in _serializers:
        return _serializers[format].Deserializer
    raise SerializerDoesNotExist(format)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects) using
    a certain serializer.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def deserialize(format, stream_or_string, **options):
    """
    Deserialize a stream or a string. Return an iterator that yields ``(obj,
    m2m_relation_dict)``, where ``obj`` is an instantiated -- but *unsaved* --
    object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
    list_of_related_objects}``.
    """
    return get_deserializer(format)(stream_or_string, **options)
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def _load_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily so
    that user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _serializers
    # Build into a fresh dict and swap it in at the end, so a partially
    # populated registry is never observed through the global.
    registry = {}
    for format, module_name in BUILTIN_SERIALIZERS.items():
        register_serializer(format, module_name, registry)
    for format, module_name in getattr(settings, "SERIALIZATION_MODULES", {}).items():
        register_serializer(format, module_name, registry)
    _serializers = registry
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def sort_dependencies(app_list, allow_cycles=False):
    """Sort a list of (app_config, models) pairs into a single list of models.

    The single list of models is sorted so that any model with a natural key
    is serialized before a normal model, and any model with a natural key
    dependency has it's dependencies serialized first.

    If allow_cycles is True, return the best-effort ordering that will respect
    most of dependencies but ignore some of them to break the cycles.

    Raises RuntimeError when allow_cycles is False and a circular natural-key
    dependency exists.
    """
    # Process the list of models, and get the list of dependencies
    model_dependencies = []
    models = set()
    for app_config, model_list in app_list:
        if model_list is None:
            # A None model list means "all models of this app".
            model_list = app_config.get_models()

        for model in model_list:
            models.add(model)
            # Add any explicitly defined dependencies
            if hasattr(model, "natural_key"):
                deps = getattr(model.natural_key, "dependencies", [])
                if deps:
                    # Dependencies are declared as "app_label.ModelName"
                    # strings; resolve them to model classes.
                    deps = [apps.get_model(dep) for dep in deps]
            else:
                deps = []

            # Now add a dependency for any FK relation with a model that
            # defines a natural key
            for field in model._meta.fields:
                if field.remote_field:
                    rel_model = field.remote_field.model
                    # Self-references are excluded: they can't be ordered.
                    if hasattr(rel_model, "natural_key") and rel_model != model:
                        deps.append(rel_model)
            # Also add a dependency for any simple M2M relation with a model
            # that defines a natural key. M2M relations with explicit through
            # models don't count as dependencies.
            for field in model._meta.many_to_many:
                if field.remote_field.through._meta.auto_created:
                    rel_model = field.remote_field.model
                    if hasattr(rel_model, "natural_key") and rel_model != model:
                        deps.append(rel_model)
            model_dependencies.append((model, deps))

    model_dependencies.reverse()
    # Now sort the models to ensure that dependencies are met. This
    # is done by repeatedly iterating over the input list of models.
    # If all the dependencies of a given model are in the final list,
    # that model is promoted to the end of the final list. This process
    # continues until the input list is empty, or we do a full iteration
    # over the input models without promoting a model to the final list.
    # If we do a full iteration without a promotion, that means there are
    # circular dependencies in the list.
    model_list = []
    while model_dependencies:
        skipped = []
        changed = False
        while model_dependencies:
            model, deps = model_dependencies.pop()

            # If all of the models in the dependency list are either already
            # on the final model list, or not on the original serialization list,
            # then we've found another model with all it's dependencies satisfied.
            if all(d not in models or d in model_list for d in deps):
                model_list.append(model)
                changed = True
            else:
                skipped.append((model, deps))
        if not changed:
            if allow_cycles:
                # If cycles are allowed, add the last skipped model and ignore
                # its dependencies. This could be improved by some graph
                # analysis to ignore as few dependencies as possible.
                model, _ = skipped.pop()
                model_list.append(model)
            else:
                raise RuntimeError(
                    "Can't resolve dependencies for %s in serialized app list."
                    % ", ".join(
                        model._meta.label
                        for model, deps in sorted(
                            skipped, key=lambda obj: obj[0].__name__
                        )
                    ),
                )
        model_dependencies = skipped

    return model_list
|
testbed/django__django/django/core/serializers/base.py
ADDED
|
@@ -0,0 +1,386 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Module for abstract serializer/unserializer base classes.
|
| 3 |
+
"""
|
| 4 |
+
from io import StringIO
|
| 5 |
+
|
| 6 |
+
from django.core.exceptions import ObjectDoesNotExist
|
| 7 |
+
from django.db import models
|
| 8 |
+
|
| 9 |
+
DEFER_FIELD = object()
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class SerializerDoesNotExist(KeyError):
    """The requested serializer was not found."""
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class SerializationError(Exception):
    """Something bad happened during serialization."""
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class DeserializationError(Exception):
    """Something bad happened during deserialization."""

    @classmethod
    def WithData(cls, original_exc, model, fk, field_value):
        """
        Factory method for creating a deserialization error which has a more
        explanatory message.
        """
        message = "%s: (%s:pk=%s) field_value was '%s'" % (
            original_exc,
            model,
            fk,
            field_value,
        )
        return cls(message)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class M2MDeserializationError(Exception):
    """Something bad happened during deserialization of a ManyToManyField."""

    def __init__(self, original_exc, pk):
        # Keep the underlying exception and the offending pk so callers can
        # build a more explanatory DeserializationError from them.
        self.original_exc = original_exc
        self.pk = pk
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class ProgressBar:
    """Text progress bar redrawn on ``output`` as ``update()`` is called."""

    # Width of the bar interior, in characters.
    progress_width = 75

    def __init__(self, output, total_count):
        self.output = output
        self.total_count = total_count
        self.prev_done = 0

    def update(self, count):
        """Redraw the bar for ``count`` processed items (no-op without output)."""
        if not self.output:
            return
        width = self.progress_width
        done = (count * 100 // self.total_count) * width // 100
        # Only redraw when the bar would visibly grow.
        if done <= self.prev_done:
            return
        self.prev_done = done
        # A single-item run never needs to rewind the line.
        prefix = "" if self.total_count == 1 else "\r"
        bar = "[" + "." * done + " " * (width - done) + "]"
        self.output.write(prefix + bar)
        if done == width:
            self.output.write("\n")
        self.output.flush()
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class Serializer:
    """
    Abstract serializer base class.

    Subclasses produce output by implementing start_serialization(),
    start_object(), handle_field(), handle_fk_field() and handle_m2m_field();
    serialize() walks the queryset and dispatches each field to those hooks.
    """

    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False
    # Hook points so subclasses can swap the progress reporter / output buffer.
    progress_class = ProgressBar
    stream_class = StringIO

    def serialize(
        self,
        queryset,
        *,
        stream=None,
        fields=None,
        use_natural_foreign_keys=False,
        use_natural_primary_keys=False,
        progress_output=None,
        object_count=0,
        **options,
    ):
        """
        Serialize a queryset.

        ``stream`` is the writable object serialized into (defaults to a new
        ``stream_class`` instance); ``fields`` optionally restricts which
        field names are emitted; ``progress_output``/``object_count`` feed
        the progress bar. Remaining options are stored on ``self.options``
        for subclasses. Returns getvalue() of the stream.
        """
        self.options = options

        self.stream = stream if stream is not None else self.stream_class()
        self.selected_fields = fields
        self.use_natural_foreign_keys = use_natural_foreign_keys
        self.use_natural_primary_keys = use_natural_primary_keys
        progress_bar = self.progress_class(progress_output, object_count)

        self.start_serialization()
        self.first = True
        for count, obj in enumerate(queryset, start=1):
            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            # When using natural primary keys, retrieve the pk field of the
            # parent for multi-table inheritance child models. That field must
            # be serialized, otherwise deserialization isn't possible.
            if self.use_natural_primary_keys:
                pk = concrete_model._meta.pk
                pk_parent = (
                    pk if pk.remote_field and pk.remote_field.parent_link else None
                )
            else:
                pk_parent = None
            for field in concrete_model._meta.local_fields:
                if field.serialize or field is pk_parent:
                    if field.remote_field is None:
                        if (
                            self.selected_fields is None
                            or field.attname in self.selected_fields
                        ):
                            self.handle_field(obj, field)
                    else:
                        # FK fields are matched on the relation name: the last
                        # three characters of attname are stripped (presumably
                        # the "_id" suffix — confirm against field naming).
                        if (
                            self.selected_fields is None
                            or field.attname[:-3] in self.selected_fields
                        ):
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.local_many_to_many:
                if field.serialize:
                    if (
                        self.selected_fields is None
                        or field.attname in self.selected_fields
                    ):
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
            progress_bar.update(count)
            # Becomes (and stays) False once the first object is emitted.
            self.first = self.first and False
        self.end_serialization()
        return self.getvalue()

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError(
            "subclasses of Serializer must provide a start_serialization() method"
        )

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError(
            "subclasses of Serializer must provide a start_object() method"
        )

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError(
            "subclasses of Serializer must provide a handle_field() method"
        )

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError(
            "subclasses of Serializer must provide a handle_fk_field() method"
        )

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError(
            "subclasses of Serializer must provide a handle_m2m_field() method"
        )

    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        if callable(getattr(self.stream, "getvalue", None)):
            return self.stream.getvalue()
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
class Deserializer:
    """
    Abstract base deserializer class.

    Subclasses implement __next__() to yield DeserializedObject instances
    read from ``self.stream``.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Strings are wrapped so subclasses can always read from a stream.
        self.stream = (
            StringIO(stream_or_string)
            if isinstance(stream_or_string, str)
            else stream_or_string
        )

    def __iter__(self):
        return self

    def __next__(self):
        """Iteration interface -- return the next item in the stream"""
        raise NotImplementedError(
            "subclasses of Deserializer must provide a __next__() method"
        )
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
class DeserializedObject:
    """
    A deserialized model.

    Basically a container for holding the pre-saved deserialized data along
    with the many-to-many data saved with the object.

    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """

    def __init__(self, obj, m2m_data=None, deferred_fields=None):
        # obj: the unsaved model instance. m2m_data maps accessor names to
        # lists of related pks. deferred_fields maps relation fields whose
        # targets couldn't be resolved yet (forward references) to their
        # raw serialized values.
        self.object = obj
        self.m2m_data = m2m_data
        self.deferred_fields = deferred_fields

    def __repr__(self):
        return "<%s: %s(pk=%s)>" % (
            self.__class__.__name__,
            self.object._meta.label,
            self.object.pk,
        )

    def save(self, save_m2m=True, using=None, **kwargs):
        """Persist the object (and, by default, its m2m data) to the database."""
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # raw=True is passed to any pre/post_save signals.
        models.Model.save_base(self.object, using=using, raw=True, **kwargs)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                getattr(self.object, accessor_name).set(object_list)

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None

    def save_deferred_fields(self, using=None):
        """Resolve previously deferred forward references, then re-save."""
        self.m2m_data = {}
        for field, field_value in self.deferred_fields.items():
            opts = self.object._meta
            label = opts.app_label + "." + opts.model_name
            if isinstance(field.remote_field, models.ManyToManyRel):
                try:
                    # handle_forward_references=False: by now the targets
                    # must exist, so failures are real errors.
                    values = deserialize_m2m_values(
                        field, field_value, using, handle_forward_references=False
                    )
                except M2MDeserializationError as e:
                    raise DeserializationError.WithData(
                        e.original_exc, label, self.object.pk, e.pk
                    )
                self.m2m_data[field.name] = values
            elif isinstance(field.remote_field, models.ManyToOneRel):
                try:
                    value = deserialize_fk_value(
                        field, field_value, using, handle_forward_references=False
                    )
                except Exception as e:
                    raise DeserializationError.WithData(
                        e, label, self.object.pk, field_value
                    )
                setattr(self.object, field.attname, value)
        self.save()
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def build_instance(Model, data, db):
    """
    Build a model instance.

    If the model instance doesn't have a primary key and the model supports
    natural keys, try to retrieve it from the database.
    """
    default_manager = Model._meta.default_manager
    pk = data.get(Model._meta.pk.attname)
    if (
        pk is None
        and hasattr(default_manager, "get_by_natural_key")
        and hasattr(Model, "natural_key")
    ):
        # Build a throwaway instance only to compute its natural key; its
        # _state.db is set so natural_key() resolves related objects on the
        # right database.
        obj = Model(**data)
        obj._state.db = db
        natural_key = obj.natural_key()
        try:
            # If an object with this natural key already exists, reuse its
            # pk so deserialization updates it instead of inserting a
            # duplicate row.
            data[Model._meta.pk.attname] = Model._meta.pk.to_python(
                default_manager.db_manager(db).get_by_natural_key(*natural_key).pk
            )
        except Model.DoesNotExist:
            pass
    return Model(**data)
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
def deserialize_m2m_values(field, field_value, using, handle_forward_references):
    """
    Convert raw many-to-many values into a list of related-object pks.

    Returns DEFER_FIELD when a natural-key target doesn't exist yet and
    ``handle_forward_references`` is True; raises M2MDeserializationError
    on any other conversion failure.
    """
    model = field.remote_field.model
    if hasattr(model._default_manager, "get_by_natural_key"):

        def m2m_convert(value):
            # A non-string iterable is treated as a natural-key tuple;
            # anything else is assumed to be a raw pk value.
            if hasattr(value, "__iter__") and not isinstance(value, str):
                return (
                    model._default_manager.db_manager(using)
                    .get_by_natural_key(*value)
                    .pk
                )
            else:
                return model._meta.pk.to_python(value)

    else:

        def m2m_convert(v):
            return model._meta.pk.to_python(v)

    try:
        pks_iter = iter(field_value)
    except TypeError as e:
        # The whole field value wasn't iterable; report it as the bad pk.
        raise M2MDeserializationError(e, field_value)
    try:
        values = []
        for pk in pks_iter:
            values.append(m2m_convert(pk))
        return values
    except Exception as e:
        if isinstance(e, ObjectDoesNotExist) and handle_forward_references:
            return DEFER_FIELD
        else:
            # ``pk`` still holds the value whose conversion failed.
            raise M2MDeserializationError(e, pk)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def deserialize_fk_value(field, field_value, using, handle_forward_references):
    """
    Convert a raw foreign-key value (pk or natural-key tuple) into the value
    to store on the FK attribute.

    Returns None for a null value, and DEFER_FIELD for an unresolved natural
    key when ``handle_forward_references`` is True.
    """
    if field_value is None:
        return None
    model = field.remote_field.model
    default_manager = model._default_manager
    field_name = field.remote_field.field_name
    # A non-string iterable is only treated as a natural key when the target
    # model's manager actually supports natural-key lookup.
    if (
        hasattr(default_manager, "get_by_natural_key")
        and hasattr(field_value, "__iter__")
        and not isinstance(field_value, str)
    ):
        try:
            obj = default_manager.db_manager(using).get_by_natural_key(*field_value)
        except ObjectDoesNotExist:
            if handle_forward_references:
                return DEFER_FIELD
            else:
                raise
        value = getattr(obj, field_name)
        # If this is a natural foreign key to an object that has a FK/O2O as
        # the foreign key, use the FK value.
        if model._meta.pk.remote_field:
            value = value.pk
        return value
    # Plain pk value: coerce it through the referenced field's to_python().
    return model._meta.get_field(field_name).to_python(field_value)
|
testbed/django__django/django/core/serializers/json.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Serialize data to/from JSON
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import datetime
|
| 6 |
+
import decimal
|
| 7 |
+
import json
|
| 8 |
+
import uuid
|
| 9 |
+
|
| 10 |
+
from django.core.serializers.base import DeserializationError
|
| 11 |
+
from django.core.serializers.python import Deserializer as PythonDeserializer
|
| 12 |
+
from django.core.serializers.python import Serializer as PythonSerializer
|
| 13 |
+
from django.utils.duration import duration_iso_string
|
| 14 |
+
from django.utils.functional import Promise
|
| 15 |
+
from django.utils.timezone import is_aware
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Serializer(PythonSerializer):
    """Convert a queryset to JSON."""

    internal_use_only = False

    def _init_options(self):
        # Everything left in self.options after removing serializer-only
        # keys is forwarded verbatim to json.dump().
        self._current = None
        self.json_kwargs = self.options.copy()
        self.json_kwargs.pop("stream", None)
        self.json_kwargs.pop("fields", None)
        if self.options.get("indent"):
            # Prevent trailing spaces
            self.json_kwargs["separators"] = (",", ": ")
        self.json_kwargs.setdefault("cls", DjangoJSONEncoder)
        self.json_kwargs.setdefault("ensure_ascii", False)

    def start_serialization(self):
        # Open the top-level JSON array; objects are appended one by one.
        self._init_options()
        self.stream.write("[")

    def end_serialization(self):
        # Close the array, with newlines around "]" only in indented mode.
        if self.options.get("indent"):
            self.stream.write("\n")
        self.stream.write("]")
        if self.options.get("indent"):
            self.stream.write("\n")

    def end_object(self, obj):
        # self._current has the field data
        indent = self.options.get("indent")
        # Separate from the previous object: "," plus a space in compact
        # mode, or "," plus a newline in indented mode.
        if not self.first:
            self.stream.write(",")
            if not indent:
                self.stream.write(" ")
        if indent:
            self.stream.write("\n")
        json.dump(self.get_dump_object(obj), self.stream, **self.json_kwargs)
        self._current = None

    def getvalue(self):
        # Grandparent super: skip PythonSerializer.getvalue() and use the
        # stream-based implementation from the base Serializer.
        return super(PythonSerializer, self).getvalue()
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of JSON data."""
    # Normalize the input down to a str of JSON text.
    data = stream_or_string
    if not isinstance(data, (bytes, str)):
        data = data.read()
    if isinstance(data, bytes):
        data = data.decode()
    try:
        yield from PythonDeserializer(json.loads(data), **options)
    except (GeneratorExit, DeserializationError):
        # Let generator shutdown and already-wrapped errors propagate as-is.
        raise
    except Exception as exc:
        raise DeserializationError() from exc
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class DjangoJSONEncoder(json.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time, decimal types, and
    UUIDs.
    """

    def default(self, o):
        # See "Date Time String Format" in the ECMA-262 specification.
        if isinstance(o, datetime.datetime):
            r = o.isoformat()
            if o.microsecond:
                # Truncate microseconds to milliseconds: keep the first 23
                # characters ("YYYY-MM-DDTHH:MM:SS.fff") and drop the last
                # three fractional digits, preserving any tz suffix after
                # position 26.
                r = r[:23] + r[26:]
            if r.endswith("+00:00"):
                # ECMA-262 spells UTC as a literal "Z" suffix.
                r = r.removesuffix("+00:00") + "Z"
            return r
        elif isinstance(o, datetime.date):
            return o.isoformat()
        elif isinstance(o, datetime.time):
            if is_aware(o):
                raise ValueError("JSON can't represent timezone-aware times.")
            r = o.isoformat()
            if o.microsecond:
                # Keep millisecond precision only ("HH:MM:SS.fff").
                r = r[:12]
            return r
        elif isinstance(o, datetime.timedelta):
            return duration_iso_string(o)
        elif isinstance(o, (decimal.Decimal, uuid.UUID, Promise)):
            # Types with a faithful string form are emitted as strings
            # (Decimal as str avoids float precision loss).
            return str(o)
        else:
            # Defer to the base class, which raises TypeError.
            return super().default(o)
|
testbed/django__django/django/core/serializers/jsonl.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Serialize data to/from JSON Lines
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
|
| 7 |
+
from django.core.serializers.base import DeserializationError
|
| 8 |
+
from django.core.serializers.json import DjangoJSONEncoder
|
| 9 |
+
from django.core.serializers.python import Deserializer as PythonDeserializer
|
| 10 |
+
from django.core.serializers.python import Serializer as PythonSerializer
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Serializer(PythonSerializer):
    """Convert a queryset to JSON Lines."""

    internal_use_only = False

    def _init_options(self):
        """Derive the json.dump() keyword arguments from self.options."""
        self._current = None
        kwargs = self.options.copy()
        # Drop serializer-level options; "indent" is meaningless for JSONL
        # because every object must stay on a single line.
        for unsupported in ("stream", "fields", "indent"):
            kwargs.pop(unsupported, None)
        kwargs["separators"] = (",", ": ")
        kwargs.setdefault("cls", DjangoJSONEncoder)
        kwargs.setdefault("ensure_ascii", False)
        self.json_kwargs = kwargs

    def start_serialization(self):
        # No opening delimiter in JSON Lines output.
        self._init_options()

    def end_object(self, obj):
        # self._current has the field data
        json.dump(self.get_dump_object(obj), self.stream, **self.json_kwargs)
        self.stream.write("\n")
        self._current = None

    def getvalue(self):
        # Grandparent super
        return super(PythonSerializer, self).getvalue()
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of JSON data."""
    # Normalize to an iterable of lines: bytes -> str -> list of lines;
    # anything else is assumed to be an iterable (e.g. a file object).
    source = stream_or_string
    if isinstance(source, bytes):
        source = source.decode()
    if isinstance(source, str):
        source = source.split("\n")

    for line in source:
        # Skip blank lines (e.g. a trailing newline at EOF).
        if not line.strip():
            continue
        try:
            yield from PythonDeserializer([json.loads(line)], **options)
        except (GeneratorExit, DeserializationError):
            raise
        except Exception as exc:
            raise DeserializationError() from exc
|
testbed/django__django/django/core/serializers/python.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
A Python "serializer". Doesn't do much serializing per se -- just converts to
|
| 3 |
+
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
|
| 4 |
+
other serializers.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from django.apps import apps
|
| 8 |
+
from django.core.serializers import base
|
| 9 |
+
from django.db import DEFAULT_DB_ALIAS, models
|
| 10 |
+
from django.utils.encoding import is_protected_type
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Serializer(base.Serializer):
    """
    Serialize a QuerySet to basic Python objects.
    """

    internal_use_only = True

    def start_serialization(self):
        # self._current collects field data for the object being serialized;
        # self.objects accumulates the finished per-object dicts.
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        self._current = {}

    def end_object(self, obj):
        self.objects.append(self.get_dump_object(obj))
        self._current = None

    def get_dump_object(self, obj):
        data = {"model": str(obj._meta)}
        # Omit "pk" when natural primary keys are requested and the model can
        # produce one; the natural key then identifies the object.
        if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"):
            data["pk"] = self._value_from_field(obj, obj._meta.pk)
        data["fields"] = self._current
        return data

    def _value_from_field(self, obj, field):
        value = field.value_from_object(obj)
        # Protected types (i.e., primitives like None, numbers, dates,
        # and Decimals) are passed through as is. All other values are
        # converted to string first.
        return value if is_protected_type(value) else field.value_to_string(obj)

    def handle_field(self, obj, field):
        self._current[field.name] = self._value_from_field(obj, field)

    def handle_fk_field(self, obj, field):
        # Emit the related object's natural key when requested and available;
        # otherwise fall back to the raw foreign key value.
        if self.use_natural_foreign_keys and hasattr(
            field.remote_field.model, "natural_key"
        ):
            related = getattr(obj, field.name)
            if related:
                value = related.natural_key()
            else:
                value = None
        else:
            value = self._value_from_field(obj, field)
        self._current[field.name] = value

    def handle_m2m_field(self, obj, field):
        # Only serialize the relation when Django auto-created the through
        # table; explicit through models get serialized as models themselves.
        if field.remote_field.through._meta.auto_created:
            if self.use_natural_foreign_keys and hasattr(
                field.remote_field.model, "natural_key"
            ):
                # Natural-key path: need full related instances.

                def m2m_value(value):
                    return value.natural_key()

                def queryset_iterator(obj, field):
                    return getattr(obj, field.name).iterator()

            else:
                # PK path: fetch only the primary keys.

                def m2m_value(value):
                    return self._value_from_field(value, value._meta.pk)

                def queryset_iterator(obj, field):
                    return (
                        getattr(obj, field.name).select_related().only("pk").iterator()
                    )

            # Reuse prefetched results when present to avoid extra queries.
            m2m_iter = getattr(obj, "_prefetched_objects_cache", {}).get(
                field.name,
                queryset_iterator(obj, field),
            )
            self._current[field.name] = [m2m_value(related) for related in m2m_iter]

    def getvalue(self):
        return self.objects
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def Deserializer(
    object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options
):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    # When True, unresolvable natural-key references are deferred instead of
    # raising (used during loaddata for forward references).
    handle_forward_references = options.pop("handle_forward_references", False)
    field_names_cache = {}  # Model: <list of field_names>

    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        try:
            Model = _get_model(d["model"])
        except base.DeserializationError:
            if ignorenonexistent:
                continue
            else:
                raise
        data = {}
        if "pk" in d:
            try:
                data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get("pk"))
            except Exception as e:
                raise base.DeserializationError.WithData(
                    e, d["model"], d.get("pk"), None
                )
        m2m_data = {}
        deferred_fields = {}

        # Cache the per-model field-name set so repeated objects of the same
        # model don't recompute it.
        if Model not in field_names_cache:
            field_names_cache[Model] = {f.name for f in Model._meta.get_fields()}
        field_names = field_names_cache[Model]

        # Handle each field
        for field_name, field_value in d["fields"].items():
            if ignorenonexistent and field_name not in field_names:
                # skip fields no longer on model
                continue

            field = Model._meta.get_field(field_name)

            # Handle M2M relations
            if field.remote_field and isinstance(
                field.remote_field, models.ManyToManyRel
            ):
                try:
                    values = base.deserialize_m2m_values(
                        field, field_value, using, handle_forward_references
                    )
                except base.M2MDeserializationError as e:
                    raise base.DeserializationError.WithData(
                        e.original_exc, d["model"], d.get("pk"), e.pk
                    )
                # DEFER_FIELD means the target isn't loaded yet; resolve later.
                if values == base.DEFER_FIELD:
                    deferred_fields[field] = field_value
                else:
                    m2m_data[field.name] = values
            # Handle FK fields
            elif field.remote_field and isinstance(
                field.remote_field, models.ManyToOneRel
            ):
                try:
                    value = base.deserialize_fk_value(
                        field, field_value, using, handle_forward_references
                    )
                except Exception as e:
                    raise base.DeserializationError.WithData(
                        e, d["model"], d.get("pk"), field_value
                    )
                if value == base.DEFER_FIELD:
                    deferred_fields[field] = field_value
                else:
                    data[field.attname] = value
            # Handle all other fields
            else:
                try:
                    data[field.name] = field.to_python(field_value)
                except Exception as e:
                    raise base.DeserializationError.WithData(
                        e, d["model"], d.get("pk"), field_value
                    )

        obj = base.build_instance(Model, data, using)
        # M2M values and deferred fields are applied by DeserializedObject.save().
        yield base.DeserializedObject(obj, m2m_data, deferred_fields)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _get_model(model_identifier):
    """Look up a model from an "app_label.model_name" string."""
    # TypeError covers a malformed identifier, LookupError an unknown model.
    try:
        model = apps.get_model(model_identifier)
    except (LookupError, TypeError):
        raise base.DeserializationError(
            "Invalid model identifier: '%s'" % model_identifier
        )
    return model
|
testbed/django__django/django/core/serializers/pyyaml.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
YAML serializer.
|
| 3 |
+
|
| 4 |
+
Requires PyYaml (https://pyyaml.org/), but that's checked for in __init__.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import collections
|
| 8 |
+
import decimal
|
| 9 |
+
from io import StringIO
|
| 10 |
+
|
| 11 |
+
import yaml
|
| 12 |
+
|
| 13 |
+
from django.core.serializers.base import DeserializationError
|
| 14 |
+
from django.core.serializers.python import Deserializer as PythonDeserializer
|
| 15 |
+
from django.core.serializers.python import Serializer as PythonSerializer
|
| 16 |
+
from django.db import models
|
| 17 |
+
|
| 18 |
+
# Use the C (faster) implementation if possible
|
| 19 |
+
try:
|
| 20 |
+
from yaml import CSafeDumper as SafeDumper
|
| 21 |
+
from yaml import CSafeLoader as SafeLoader
|
| 22 |
+
except ImportError:
|
| 23 |
+
from yaml import SafeDumper, SafeLoader
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class DjangoSafeDumper(SafeDumper):
    # Emit Decimals as plain YAML strings so no precision is lost and no
    # Python-specific tag is required.
    def represent_decimal(self, data):
        return self.represent_scalar("tag:yaml.org,2002:str", str(data))

    # Emit mappings in the dict's insertion order rather than sorted by key.
    def represent_ordered_dict(self, data):
        return self.represent_mapping("tag:yaml.org,2002:map", data.items())
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# Register the custom representers on the dumper class.
DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
DjangoSafeDumper.add_representer(
    collections.OrderedDict, DjangoSafeDumper.represent_ordered_dict
)
# Workaround to represent dictionaries in insertion order.
# See https://github.com/yaml/pyyaml/pull/143.
DjangoSafeDumper.add_representer(dict, DjangoSafeDumper.represent_ordered_dict)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class Serializer(PythonSerializer):
    """Convert a queryset to YAML."""

    internal_use_only = False

    def handle_field(self, obj, field):
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        value = getattr(obj, field.name)
        if isinstance(field, models.TimeField) and value is not None:
            self._current[field.name] = str(value)
        else:
            super().handle_field(obj, field)

    def end_serialization(self):
        # Dump the accumulated python-serializer objects in one go.
        self.options.setdefault("allow_unicode", True)
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        # Grandparent super
        return super(PythonSerializer, self).getvalue()
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of YAML data."""
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode()
    # Wrap plain text in a stream; file-like input is used directly.
    stream = (
        StringIO(stream_or_string)
        if isinstance(stream_or_string, str)
        else stream_or_string
    )
    try:
        yield from PythonDeserializer(yaml.load(stream, Loader=SafeLoader), **options)
    except (GeneratorExit, DeserializationError):
        raise
    except Exception as exc:
        raise DeserializationError() from exc
|