diff --git a/.gitattributes b/.gitattributes index 559a6942ead003227b263ea99b1ad602b96a6729..03bf442ae474a65c0462233208c5224e1e9de0cb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -98,3 +98,5 @@ testbed/pydicom__pydicom/pydicom/data/test_files/OT-PAL-8-face.dcm filter=lfs di testbed/pydicom__pydicom/pydicom/data/test_files/SC_rgb_32bit.dcm filter=lfs diff=lfs merge=lfs -text testbed/pydicom__pydicom/pydicom/data/test_files/OBXXXX1A.dcm filter=lfs diff=lfs merge=lfs -text testbed/pvlib__pvlib-python/pvlib/data/aod550_tcwv_20121101_test.nc filter=lfs diff=lfs merge=lfs -text +testbed/mwaskom__seaborn/doc/_static/favicon_old.ico filter=lfs diff=lfs merge=lfs -text +testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.pdf filter=lfs diff=lfs merge=lfs -text diff --git a/testbed/django__django/django/core/__init__.py b/testbed/django__django/django/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/core/asgi.py b/testbed/django__django/django/core/asgi.py new file mode 100644 index 0000000000000000000000000000000000000000..0d846ccd160a039c57797b577652b723bd76db52 --- /dev/null +++ b/testbed/django__django/django/core/asgi.py @@ -0,0 +1,13 @@ +import django +from django.core.handlers.asgi import ASGIHandler + + +def get_asgi_application(): + """ + The public interface to Django's ASGI support. Return an ASGI 3 callable. + + Avoids making django.core.handlers.ASGIHandler a public API, in case the + internal implementation changes or moves in the future. + """ + django.setup(set_prefix=False) + return ASGIHandler() diff --git a/testbed/django__django/django/core/exceptions.py b/testbed/django__django/django/core/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..2a2288ff4dc9db1cd3fdea53c7039ca34d41d334 --- /dev/null +++ b/testbed/django__django/django/core/exceptions.py @@ -0,0 +1,254 @@ +""" +Global Django exception and warning classes. +""" +import operator + +from django.utils.hashable import make_hashable + + +class FieldDoesNotExist(Exception): + """The requested model field does not exist""" + + pass + + +class AppRegistryNotReady(Exception): + """The django.apps registry is not populated yet""" + + pass + + +class ObjectDoesNotExist(Exception): + """The requested object does not exist""" + + silent_variable_failure = True + + +class MultipleObjectsReturned(Exception): + """The query returned multiple objects when only one was expected.""" + + pass + + +class SuspiciousOperation(Exception): + """The user did something suspicious""" + + +class SuspiciousMultipartForm(SuspiciousOperation): + """Suspect MIME request in multipart form data""" + + pass + + +class SuspiciousFileOperation(SuspiciousOperation): + """A Suspicious filesystem operation was attempted""" + + pass + + +class DisallowedHost(SuspiciousOperation): + """HTTP_HOST header contains invalid value""" + + pass + + +class DisallowedRedirect(SuspiciousOperation): + """Redirect to scheme not in allowed list""" + + pass + + +class TooManyFieldsSent(SuspiciousOperation): + """ + The number of fields in a GET or POST request exceeded + settings.DATA_UPLOAD_MAX_NUMBER_FIELDS. + """ + + pass + + +class TooManyFilesSent(SuspiciousOperation): + """ + The number of fields in a GET or POST request exceeded + settings.DATA_UPLOAD_MAX_NUMBER_FILES. 
+ """ + + pass + + +class RequestDataTooBig(SuspiciousOperation): + """ + The size of the request (excluding any file uploads) exceeded + settings.DATA_UPLOAD_MAX_MEMORY_SIZE. + """ + + pass + + +class RequestAborted(Exception): + """The request was closed before it was completed, or timed out.""" + + pass + + +class BadRequest(Exception): + """The request is malformed and cannot be processed.""" + + pass + + +class PermissionDenied(Exception): + """The user did not have permission to do that""" + + pass + + +class ViewDoesNotExist(Exception): + """The requested view does not exist""" + + pass + + +class MiddlewareNotUsed(Exception): + """This middleware is not used in this server configuration""" + + pass + + +class ImproperlyConfigured(Exception): + """Django is somehow improperly configured""" + + pass + + +class FieldError(Exception): + """Some kind of problem with a model field.""" + + pass + + +NON_FIELD_ERRORS = "__all__" + + +class ValidationError(Exception): + """An error while validating data.""" + + def __init__(self, message, code=None, params=None): + """ + The `message` argument can be a single error, a list of errors, or a + dictionary that maps field names to lists of errors. What we define as + an "error" can be either a simple string or an instance of + ValidationError with its message attribute set, and what we define as + list or dictionary can be an actual `list` or `dict` or an instance + of ValidationError with its `error_list` or `error_dict` attribute set. + """ + super().__init__(message, code, params) + + if isinstance(message, ValidationError): + if hasattr(message, "error_dict"): + message = message.error_dict + elif not hasattr(message, "message"): + message = message.error_list + else: + message, code, params = message.message, message.code, message.params + + if isinstance(message, dict): + self.error_dict = {} + for field, messages in message.items(): + if not isinstance(messages, ValidationError): + messages = ValidationError(messages) + self.error_dict[field] = messages.error_list + + elif isinstance(message, list): + self.error_list = [] + for message in message: + # Normalize plain strings to instances of ValidationError. + if not isinstance(message, ValidationError): + message = ValidationError(message) + if hasattr(message, "error_dict"): + self.error_list.extend(sum(message.error_dict.values(), [])) + else: + self.error_list.extend(message.error_list) + + else: + self.message = message + self.code = code + self.params = params + self.error_list = [self] + + @property + def message_dict(self): + # Trigger an AttributeError if this ValidationError + # doesn't have an error_dict. 
+        getattr(self, "error_dict")
+
+        return dict(self)
+
+    @property
+    def messages(self):
+        if hasattr(self, "error_dict"):
+            return sum(dict(self).values(), [])
+        return list(self)
+
+    def update_error_dict(self, error_dict):
+        if hasattr(self, "error_dict"):
+            for field, error_list in self.error_dict.items():
+                error_dict.setdefault(field, []).extend(error_list)
+        else:
+            error_dict.setdefault(NON_FIELD_ERRORS, []).extend(self.error_list)
+        return error_dict
+
+    def __iter__(self):
+        if hasattr(self, "error_dict"):
+            for field, errors in self.error_dict.items():
+                yield field, list(ValidationError(errors))
+        else:
+            for error in self.error_list:
+                message = error.message
+                if error.params:
+                    message %= error.params
+                yield str(message)
+
+    def __str__(self):
+        if hasattr(self, "error_dict"):
+            return repr(dict(self))
+        return repr(list(self))
+
+    def __repr__(self):
+        return "ValidationError(%s)" % self
+
+    def __eq__(self, other):
+        if not isinstance(other, ValidationError):
+            return NotImplemented
+        return hash(self) == hash(other)
+
+    def __hash__(self):
+        if hasattr(self, "message"):
+            return hash(
+                (
+                    self.message,
+                    self.code,
+                    make_hashable(self.params),
+                )
+            )
+        if hasattr(self, "error_dict"):
+            return hash(make_hashable(self.error_dict))
+        return hash(tuple(sorted(self.error_list, key=operator.attrgetter("message"))))
+
+
+class EmptyResultSet(Exception):
+    """A database query predicate is impossible."""
+
+    pass
+
+
+class FullResultSet(Exception):
+    """A database query predicate matches everything."""
+
+    pass
+
+
+class SynchronousOnlyOperation(Exception):
+    """The user tried to call a sync-only function from an async context."""
+
+    pass
diff --git a/testbed/django__django/django/core/files/temp.py b/testbed/django__django/django/core/files/temp.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bd31dd5f23ad1a7f08a0e8e41f0e8771c1f0b2b
--- /dev/null
+++ b/testbed/django__django/django/core/files/temp.py
@@ -0,0 +1,79 @@
+"""
+The temp module provides a NamedTemporaryFile that can be reopened in the same
+process on any platform. Most platforms use the standard Python
+tempfile.NamedTemporaryFile class, but Windows users are given a custom class.
+
+This is needed because the Python implementation of NamedTemporaryFile uses the
+O_TEMPORARY flag under Windows, which prevents the file from being reopened
+if the same flag is not provided [1][2]. Note that this does not address the
+more general issue of opening a file for writing and reading in multiple
+processes in a manner that works across platforms.
+
+The custom version of NamedTemporaryFile doesn't support the same keyword
+arguments available in tempfile.NamedTemporaryFile.
+
+1: https://mail.python.org/pipermail/python-list/2005-December/336957.html
+2: https://bugs.python.org/issue14243
+"""
+
+import os
+import tempfile
+
+from django.core.files.utils import FileProxyMixin
+
+__all__ = (
+    "NamedTemporaryFile",
+    "gettempdir",
+)
+
+
+if os.name == "nt":
+
+    class TemporaryFile(FileProxyMixin):
+        """
+        Temporary file object constructor that supports reopening of the
+        temporary file in Windows.
+
+        Unlike tempfile.NamedTemporaryFile from the standard library,
+        __init__() doesn't support the 'delete', 'buffering', 'encoding', or
+        'newline' keyword arguments.
+ """ + + def __init__(self, mode="w+b", bufsize=-1, suffix="", prefix="", dir=None): + fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir) + self.name = name + self.file = os.fdopen(fd, mode, bufsize) + self.close_called = False + + # Because close can be called during shutdown + # we need to cache os.unlink and access it + # as self.unlink only + unlink = os.unlink + + def close(self): + if not self.close_called: + self.close_called = True + try: + self.file.close() + except OSError: + pass + try: + self.unlink(self.name) + except OSError: + pass + + def __del__(self): + self.close() + + def __enter__(self): + self.file.__enter__() + return self + + def __exit__(self, exc, value, tb): + self.file.__exit__(exc, value, tb) + + NamedTemporaryFile = TemporaryFile +else: + NamedTemporaryFile = tempfile.NamedTemporaryFile + +gettempdir = tempfile.gettempdir diff --git a/testbed/django__django/django/core/files/utils.py b/testbed/django__django/django/core/files/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..611f932f6ea8f6ec83abd18d7fdb4e4c451f347f --- /dev/null +++ b/testbed/django__django/django/core/files/utils.py @@ -0,0 +1,78 @@ +import os +import pathlib + +from django.core.exceptions import SuspiciousFileOperation + + +def validate_file_name(name, allow_relative_path=False): + # Remove potentially dangerous names + if os.path.basename(name) in {"", ".", ".."}: + raise SuspiciousFileOperation("Could not derive file name from '%s'" % name) + + if allow_relative_path: + # Use PurePosixPath() because this branch is checked only in + # FileField.generate_filename() where all file paths are expected to be + # Unix style (with forward slashes). + path = pathlib.PurePosixPath(name) + if path.is_absolute() or ".." in path.parts: + raise SuspiciousFileOperation( + "Detected path traversal attempt in '%s'" % name + ) + elif name != os.path.basename(name): + raise SuspiciousFileOperation("File name '%s' includes path elements" % name) + + return name + + +class FileProxyMixin: + """ + A mixin class used to forward file methods to an underlying file + object. 
The internal file object has to be called "file":: + + class FileProxy(FileProxyMixin): + def __init__(self, file): + self.file = file + """ + + encoding = property(lambda self: self.file.encoding) + fileno = property(lambda self: self.file.fileno) + flush = property(lambda self: self.file.flush) + isatty = property(lambda self: self.file.isatty) + newlines = property(lambda self: self.file.newlines) + read = property(lambda self: self.file.read) + readinto = property(lambda self: self.file.readinto) + readline = property(lambda self: self.file.readline) + readlines = property(lambda self: self.file.readlines) + seek = property(lambda self: self.file.seek) + tell = property(lambda self: self.file.tell) + truncate = property(lambda self: self.file.truncate) + write = property(lambda self: self.file.write) + writelines = property(lambda self: self.file.writelines) + + @property + def closed(self): + return not self.file or self.file.closed + + def readable(self): + if self.closed: + return False + if hasattr(self.file, "readable"): + return self.file.readable() + return True + + def writable(self): + if self.closed: + return False + if hasattr(self.file, "writable"): + return self.file.writable() + return "w" in getattr(self.file, "mode", "") + + def seekable(self): + if self.closed: + return False + if hasattr(self.file, "seekable"): + return self.file.seekable() + return True + + def __iter__(self): + return iter(self.file) diff --git a/testbed/django__django/django/core/mail/__init__.py b/testbed/django__django/django/core/mail/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dc63e8702c68137b287031debe910aeb83016394 --- /dev/null +++ b/testbed/django__django/django/core/mail/__init__.py @@ -0,0 +1,154 @@ +""" +Tools for sending email. +""" +from django.conf import settings + +# Imported for backwards compatibility and for the sake +# of a cleaner namespace. These symbols used to be in +# django/core/mail.py before the introduction of email +# backends and the subsequent reorganization (See #10355) +from django.core.mail.message import ( + DEFAULT_ATTACHMENT_MIME_TYPE, + BadHeaderError, + EmailMessage, + EmailMultiAlternatives, + SafeMIMEMultipart, + SafeMIMEText, + forbid_multi_line_headers, + make_msgid, +) +from django.core.mail.utils import DNS_NAME, CachedDnsName +from django.utils.module_loading import import_string + +__all__ = [ + "CachedDnsName", + "DNS_NAME", + "EmailMessage", + "EmailMultiAlternatives", + "SafeMIMEText", + "SafeMIMEMultipart", + "DEFAULT_ATTACHMENT_MIME_TYPE", + "make_msgid", + "BadHeaderError", + "forbid_multi_line_headers", + "get_connection", + "send_mail", + "send_mass_mail", + "mail_admins", + "mail_managers", +] + + +def get_connection(backend=None, fail_silently=False, **kwds): + """Load an email backend and return an instance of it. + + If backend is None (default), use settings.EMAIL_BACKEND. + + Both fail_silently and other keyword arguments are used in the + constructor of the backend. + """ + klass = import_string(backend or settings.EMAIL_BACKEND) + return klass(fail_silently=fail_silently, **kwds) + + +def send_mail( + subject, + message, + from_email, + recipient_list, + fail_silently=False, + auth_user=None, + auth_password=None, + connection=None, + html_message=None, +): + """ + Easy wrapper for sending a single message to a recipient list. All members + of the recipient list will see the other recipients in the 'To' field. + + If from_email is None, use the DEFAULT_FROM_EMAIL setting. 
+ If auth_user is None, use the EMAIL_HOST_USER setting. + If auth_password is None, use the EMAIL_HOST_PASSWORD setting. + + Note: The API for this method is frozen. New code wanting to extend the + functionality should use the EmailMessage class directly. + """ + connection = connection or get_connection( + username=auth_user, + password=auth_password, + fail_silently=fail_silently, + ) + mail = EmailMultiAlternatives( + subject, message, from_email, recipient_list, connection=connection + ) + if html_message: + mail.attach_alternative(html_message, "text/html") + + return mail.send() + + +def send_mass_mail( + datatuple, fail_silently=False, auth_user=None, auth_password=None, connection=None +): + """ + Given a datatuple of (subject, message, from_email, recipient_list), send + each message to each recipient list. Return the number of emails sent. + + If from_email is None, use the DEFAULT_FROM_EMAIL setting. + If auth_user and auth_password are set, use them to log in. + If auth_user is None, use the EMAIL_HOST_USER setting. + If auth_password is None, use the EMAIL_HOST_PASSWORD setting. + + Note: The API for this method is frozen. New code wanting to extend the + functionality should use the EmailMessage class directly. + """ + connection = connection or get_connection( + username=auth_user, + password=auth_password, + fail_silently=fail_silently, + ) + messages = [ + EmailMessage(subject, message, sender, recipient, connection=connection) + for subject, message, sender, recipient in datatuple + ] + return connection.send_messages(messages) + + +def mail_admins( + subject, message, fail_silently=False, connection=None, html_message=None +): + """Send a message to the admins, as defined by the ADMINS setting.""" + if not settings.ADMINS: + return + if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.ADMINS): + raise ValueError("The ADMINS setting must be a list of 2-tuples.") + mail = EmailMultiAlternatives( + "%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject), + message, + settings.SERVER_EMAIL, + [a[1] for a in settings.ADMINS], + connection=connection, + ) + if html_message: + mail.attach_alternative(html_message, "text/html") + mail.send(fail_silently=fail_silently) + + +def mail_managers( + subject, message, fail_silently=False, connection=None, html_message=None +): + """Send a message to the managers, as defined by the MANAGERS setting.""" + if not settings.MANAGERS: + return + if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.MANAGERS): + raise ValueError("The MANAGERS setting must be a list of 2-tuples.") + mail = EmailMultiAlternatives( + "%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject), + message, + settings.SERVER_EMAIL, + [a[1] for a in settings.MANAGERS], + connection=connection, + ) + if html_message: + mail.attach_alternative(html_message, "text/html") + mail.send(fail_silently=fail_silently) diff --git a/testbed/django__django/django/core/mail/backends/__init__.py b/testbed/django__django/django/core/mail/backends/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5973b499b0d303d992a90f1a4368e5873bc33ef2 --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/__init__.py @@ -0,0 +1 @@ +# Mail backends shipped with Django. 
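
For orientation before the individual backends, here is a minimal sketch of how the public API defined in django/core/mail/__init__.py above is meant to be called. It is illustrative only and not part of the patch; the addresses and settings values are made-up placeholders, and it assumes a standalone script with Django importable and no DJANGO_SETTINGS_MODULE set.

from django.conf import settings

# Standalone configuration: route mail through the console backend added
# below, so messages are written to stdout instead of leaving the machine.
settings.configure(
    EMAIL_BACKEND="django.core.mail.backends.console.EmailBackend",
    DEFAULT_FROM_EMAIL="webmaster@example.com",  # placeholder address
)

from django.core.mail import EmailMultiAlternatives, get_connection, send_mail

# High-level helper: builds an EmailMultiAlternatives internally and sends it.
send_mail(
    subject="Subject",
    message="Plain-text body.",
    from_email=None,  # None falls back to settings.DEFAULT_FROM_EMAIL
    recipient_list=["to@example.com"],
    html_message="<p>HTML body.</p>",
)

# The same message built explicitly on a reusable backend connection.
connection = get_connection(fail_silently=False)
msg = EmailMultiAlternatives(
    "Subject", "Plain-text body.", None, ["to@example.com"], connection=connection
)
msg.attach_alternative("<p>HTML body.</p>", "text/html")
msg.send()

This connection-reuse pattern is what send_mass_mail() above relies on to deliver many messages over a single backend session.
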
diff --git a/testbed/django__django/django/core/mail/backends/console.py b/testbed/django__django/django/core/mail/backends/console.py new file mode 100644 index 0000000000000000000000000000000000000000..ee5dd285043372052d2b32efce27754306574f9f --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/console.py @@ -0,0 +1,44 @@ +""" +Email backend that writes messages to console instead of sending them. +""" +import sys +import threading + +from django.core.mail.backends.base import BaseEmailBackend + + +class EmailBackend(BaseEmailBackend): + def __init__(self, *args, **kwargs): + self.stream = kwargs.pop("stream", sys.stdout) + self._lock = threading.RLock() + super().__init__(*args, **kwargs) + + def write_message(self, message): + msg = message.message() + msg_data = msg.as_bytes() + charset = ( + msg.get_charset().get_output_charset() if msg.get_charset() else "utf-8" + ) + msg_data = msg_data.decode(charset) + self.stream.write("%s\n" % msg_data) + self.stream.write("-" * 79) + self.stream.write("\n") + + def send_messages(self, email_messages): + """Write all messages to the stream in a thread-safe way.""" + if not email_messages: + return + msg_count = 0 + with self._lock: + try: + stream_created = self.open() + for message in email_messages: + self.write_message(message) + self.stream.flush() # flush after each message + msg_count += 1 + if stream_created: + self.close() + except Exception: + if not self.fail_silently: + raise + return msg_count diff --git a/testbed/django__django/django/core/mail/backends/dummy.py b/testbed/django__django/django/core/mail/backends/dummy.py new file mode 100644 index 0000000000000000000000000000000000000000..7e47fe7564403ed756d4774e3c3f9dda1427bcdb --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/dummy.py @@ -0,0 +1,10 @@ +""" +Dummy email backend that does nothing. +""" + +from django.core.mail.backends.base import BaseEmailBackend + + +class EmailBackend(BaseEmailBackend): + def send_messages(self, email_messages): + return len(list(email_messages)) diff --git a/testbed/django__django/django/core/mail/backends/filebased.py b/testbed/django__django/django/core/mail/backends/filebased.py new file mode 100644 index 0000000000000000000000000000000000000000..3b2b0371508f37eec29dfce0f0f4bb7588bd11a6 --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/filebased.py @@ -0,0 +1,66 @@ +"""Email backend that writes messages to a file.""" + +import datetime +import os + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend + + +class EmailBackend(ConsoleEmailBackend): + def __init__(self, *args, file_path=None, **kwargs): + self._fname = None + if file_path is not None: + self.file_path = file_path + else: + self.file_path = getattr(settings, "EMAIL_FILE_PATH", None) + self.file_path = os.path.abspath(self.file_path) + try: + os.makedirs(self.file_path, exist_ok=True) + except FileExistsError: + raise ImproperlyConfigured( + "Path for saving email messages exists, but is not a directory: %s" + % self.file_path + ) + except OSError as err: + raise ImproperlyConfigured( + "Could not create directory for saving email messages: %s (%s)" + % (self.file_path, err) + ) + # Make sure that self.file_path is writable. + if not os.access(self.file_path, os.W_OK): + raise ImproperlyConfigured( + "Could not write to directory: %s" % self.file_path + ) + # Finally, call super(). 
+ # Since we're using the console-based backend as a base, + # force the stream to be None, so we don't default to stdout + kwargs["stream"] = None + super().__init__(*args, **kwargs) + + def write_message(self, message): + self.stream.write(message.message().as_bytes() + b"\n") + self.stream.write(b"-" * 79) + self.stream.write(b"\n") + + def _get_filename(self): + """Return a unique file name.""" + if self._fname is None: + timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + fname = "%s-%s.log" % (timestamp, abs(id(self))) + self._fname = os.path.join(self.file_path, fname) + return self._fname + + def open(self): + if self.stream is None: + self.stream = open(self._get_filename(), "ab") + return True + return False + + def close(self): + try: + if self.stream is not None: + self.stream.close() + finally: + self.stream = None diff --git a/testbed/django__django/django/core/mail/backends/locmem.py b/testbed/django__django/django/core/mail/backends/locmem.py new file mode 100644 index 0000000000000000000000000000000000000000..76676973a44b152aa62c3d3b38145769a2f2bef8 --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/locmem.py @@ -0,0 +1,31 @@ +""" +Backend for test environment. +""" + +from django.core import mail +from django.core.mail.backends.base import BaseEmailBackend + + +class EmailBackend(BaseEmailBackend): + """ + An email backend for use during test sessions. + + The test connection stores email messages in a dummy outbox, + rather than sending them out on the wire. + + The dummy outbox is accessible through the outbox instance attribute. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if not hasattr(mail, "outbox"): + mail.outbox = [] + + def send_messages(self, messages): + """Redirect messages to the dummy outbox""" + msg_count = 0 + for message in messages: # .message() triggers header validation + message.message() + mail.outbox.append(message) + msg_count += 1 + return msg_count diff --git a/testbed/django__django/django/core/mail/utils.py b/testbed/django__django/django/core/mail/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8143c236d5925ab6e48bf4948b6b9b8907677043 --- /dev/null +++ b/testbed/django__django/django/core/mail/utils.py @@ -0,0 +1,22 @@ +""" +Email message and email sending related helper functions. +""" + +import socket + +from django.utils.encoding import punycode + + +# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of +# seconds, which slows down the restart of the server. 
+class CachedDnsName: + def __str__(self): + return self.get_fqdn() + + def get_fqdn(self): + if not hasattr(self, "_fqdn"): + self._fqdn = punycode(socket.getfqdn()) + return self._fqdn + + +DNS_NAME = CachedDnsName() diff --git a/testbed/django__django/django/core/management/__init__.py b/testbed/django__django/django/core/management/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0c16447d580f5ef5bbcabbc4883c973ce61cab53 --- /dev/null +++ b/testbed/django__django/django/core/management/__init__.py @@ -0,0 +1,442 @@ +import functools +import os +import pkgutil +import sys +from argparse import ( + _AppendConstAction, + _CountAction, + _StoreConstAction, + _SubParsersAction, +) +from collections import defaultdict +from difflib import get_close_matches +from importlib import import_module + +import django +from django.apps import apps +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.core.management.base import ( + BaseCommand, + CommandError, + CommandParser, + handle_default_options, +) +from django.core.management.color import color_style +from django.utils import autoreload + + +def find_commands(management_dir): + """ + Given a path to a management directory, return a list of all the command + names that are available. + """ + command_dir = os.path.join(management_dir, "commands") + return [ + name + for _, name, is_pkg in pkgutil.iter_modules([command_dir]) + if not is_pkg and not name.startswith("_") + ] + + +def load_command_class(app_name, name): + """ + Given a command name and an application name, return the Command + class instance. Allow all errors raised by the import process + (ImportError, AttributeError) to propagate. + """ + module = import_module("%s.management.commands.%s" % (app_name, name)) + return module.Command() + + +@functools.cache +def get_commands(): + """ + Return a dictionary mapping command names to their callback applications. + + Look for a management.commands package in django.core, and in each + installed application -- if a commands package exists, register all + commands in that package. + + Core commands are always included. If a settings module has been + specified, also include user-defined commands. + + The dictionary is in the format {command_name: app_name}. Key-value + pairs from this dictionary can then be used in calls to + load_command_class(app_name, command_name) + + The dictionary is cached on the first call and reused on subsequent + calls. + """ + commands = {name: "django.core" for name in find_commands(__path__[0])} + + if not settings.configured: + return commands + + for app_config in reversed(apps.get_app_configs()): + path = os.path.join(app_config.path, "management") + commands.update({name: app_config.name for name in find_commands(path)}) + + return commands + + +def call_command(command_name, *args, **options): + """ + Call the given command, with the given options and args/kwargs. + + This is the primary API you should use for calling specific commands. + + `command_name` may be a string or a command object. Using a string is + preferred unless the command object is required for further processing or + testing. + + Some examples: + call_command('migrate') + call_command('shell', plain=True) + call_command('sqlmigrate', 'myapp') + + from django.core.management.commands import flush + cmd = flush.Command() + call_command(cmd, verbosity=0, interactive=False) + # Do something with cmd ... 
+ """ + if isinstance(command_name, BaseCommand): + # Command object passed in. + command = command_name + command_name = command.__class__.__module__.split(".")[-1] + else: + # Load the command object by name. + try: + app_name = get_commands()[command_name] + except KeyError: + raise CommandError("Unknown command: %r" % command_name) + + if isinstance(app_name, BaseCommand): + # If the command is already loaded, use it directly. + command = app_name + else: + command = load_command_class(app_name, command_name) + + # Simulate argument parsing to get the option defaults (see #10080 for details). + parser = command.create_parser("", command_name) + # Use the `dest` option name from the parser option + opt_mapping = { + min(s_opt.option_strings).lstrip("-").replace("-", "_"): s_opt.dest + for s_opt in parser._actions + if s_opt.option_strings + } + arg_options = {opt_mapping.get(key, key): value for key, value in options.items()} + parse_args = [] + for arg in args: + if isinstance(arg, (list, tuple)): + parse_args += map(str, arg) + else: + parse_args.append(str(arg)) + + def get_actions(parser): + # Parser actions and actions from sub-parser choices. + for opt in parser._actions: + if isinstance(opt, _SubParsersAction): + for sub_opt in opt.choices.values(): + yield from get_actions(sub_opt) + else: + yield opt + + parser_actions = list(get_actions(parser)) + mutually_exclusive_required_options = { + opt + for group in parser._mutually_exclusive_groups + for opt in group._group_actions + if group.required + } + # Any required arguments which are passed in via **options must be passed + # to parse_args(). + for opt in parser_actions: + if opt.dest in options and ( + opt.required or opt in mutually_exclusive_required_options + ): + opt_dest_count = sum(v == opt.dest for v in opt_mapping.values()) + if opt_dest_count > 1: + raise TypeError( + f"Cannot pass the dest {opt.dest!r} that matches multiple " + f"arguments via **options." + ) + parse_args.append(min(opt.option_strings)) + if isinstance(opt, (_AppendConstAction, _CountAction, _StoreConstAction)): + continue + value = arg_options[opt.dest] + if isinstance(value, (list, tuple)): + parse_args += map(str, value) + else: + parse_args.append(str(value)) + defaults = parser.parse_args(args=parse_args) + defaults = dict(defaults._get_kwargs(), **arg_options) + # Raise an error if any unknown options were passed. + stealth_options = set(command.base_stealth_options + command.stealth_options) + dest_parameters = {action.dest for action in parser_actions} + valid_options = (dest_parameters | stealth_options).union(opt_mapping) + unknown_options = set(options) - valid_options + if unknown_options: + raise TypeError( + "Unknown option(s) for %s command: %s. " + "Valid options are: %s." + % ( + command_name, + ", ".join(sorted(unknown_options)), + ", ".join(sorted(valid_options)), + ) + ) + # Move positional args out of options to mimic legacy optparse + args = defaults.pop("args", ()) + if "skip_checks" not in options: + defaults["skip_checks"] = True + + return command.execute(*args, **defaults) + + +class ManagementUtility: + """ + Encapsulate the logic of the django-admin and manage.py utilities. 
+    """
+
+    def __init__(self, argv=None):
+        self.argv = argv or sys.argv[:]
+        self.prog_name = os.path.basename(self.argv[0])
+        if self.prog_name == "__main__.py":
+            self.prog_name = "python -m django"
+        self.settings_exception = None
+
+    def main_help_text(self, commands_only=False):
+        """Return the script's main help text, as a string."""
+        if commands_only:
+            usage = sorted(get_commands())
+        else:
+            usage = [
+                "",
+                "Type '%s help <subcommand>' for help on a specific subcommand."
+                % self.prog_name,
+                "",
+                "Available subcommands:",
+            ]
+            commands_dict = defaultdict(lambda: [])
+            for name, app in get_commands().items():
+                if app == "django.core":
+                    app = "django"
+                else:
+                    app = app.rpartition(".")[-1]
+                commands_dict[app].append(name)
+            style = color_style()
+            for app in sorted(commands_dict):
+                usage.append("")
+                usage.append(style.NOTICE("[%s]" % app))
+                for name in sorted(commands_dict[app]):
+                    usage.append("    %s" % name)
+            # Output an extra note if settings are not properly configured
+            if self.settings_exception is not None:
+                usage.append(
+                    style.NOTICE(
+                        "Note that only Django core commands are listed "
+                        "as settings are not properly configured (error: %s)."
+                        % self.settings_exception
+                    )
+                )
+
+        return "\n".join(usage)
+
+    def fetch_command(self, subcommand):
+        """
+        Try to fetch the given subcommand, printing a message with the
+        appropriate command called from the command line (usually
+        "django-admin" or "manage.py") if it can't be found.
+        """
+        # Get commands outside of try block to prevent swallowing exceptions
+        commands = get_commands()
+        try:
+            app_name = commands[subcommand]
+        except KeyError:
+            if os.environ.get("DJANGO_SETTINGS_MODULE"):
+                # If `subcommand` is missing due to misconfigured settings, the
+                # following line will retrigger an ImproperlyConfigured exception
+                # (get_commands() swallows the original one) so the user is
+                # informed about it.
+                settings.INSTALLED_APPS
+            elif not settings.configured:
+                sys.stderr.write("No Django settings specified.\n")
+            possible_matches = get_close_matches(subcommand, commands)
+            sys.stderr.write("Unknown command: %r" % subcommand)
+            if possible_matches:
+                sys.stderr.write(". Did you mean %s?" % possible_matches[0])
+            sys.stderr.write("\nType '%s help' for usage.\n" % self.prog_name)
+            sys.exit(1)
+        if isinstance(app_name, BaseCommand):
+            # If the command is already loaded, use it directly.
+            klass = app_name
+        else:
+            klass = load_command_class(app_name, subcommand)
+        return klass
+
+    def autocomplete(self):
+        """
+        Output completion suggestions for BASH.
+
+        The output of this function is passed to BASH's `COMPREPLY` variable
+        and treated as completion suggestions. `COMPREPLY` expects a space
+        separated string as the result.
+
+        The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
+        to get information about the cli input. Please refer to the BASH
+        man-page for more information about these variables.
+
+        Subcommand options are saved as pairs. A pair consists of
+        the long option string (e.g. '--exclude') and a boolean
+        value indicating if the option requires arguments. When printing to
+        stdout, an equal sign is appended to options which require arguments.
+
+        Note: If debugging this function, it is recommended to write the debug
+        output in a separate file. Otherwise the debug output will be treated
+        and formatted as potential completion suggestions.
+        """
+        # Don't complete if user hasn't sourced bash_completion file.
+ if "DJANGO_AUTO_COMPLETE" not in os.environ: + return + + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) + + try: + curr = cwords[cword - 1] + except IndexError: + curr = "" + + subcommands = [*get_commands(), "help"] + options = [("--help", False)] + + # subcommand + if cword == 1: + print(" ".join(sorted(filter(lambda x: x.startswith(curr), subcommands)))) + # subcommand options + # special case: the 'help' subcommand has no options + elif cwords[0] in subcommands and cwords[0] != "help": + subcommand_cls = self.fetch_command(cwords[0]) + # special case: add the names of installed apps to options + if cwords[0] in ("dumpdata", "sqlmigrate", "sqlsequencereset", "test"): + try: + app_configs = apps.get_app_configs() + # Get the last part of the dotted path as the app name. + options.extend((app_config.label, 0) for app_config in app_configs) + except ImportError: + # Fail silently if DJANGO_SETTINGS_MODULE isn't set. The + # user will find out once they execute the command. + pass + parser = subcommand_cls.create_parser("", cwords[0]) + options.extend( + (min(s_opt.option_strings), s_opt.nargs != 0) + for s_opt in parser._actions + if s_opt.option_strings + ) + # filter out previously specified options from available options + prev_opts = {x.split("=")[0] for x in cwords[1 : cword - 1]} + options = (opt for opt in options if opt[0] not in prev_opts) + + # filter options by current input + options = sorted((k, v) for k, v in options if k.startswith(curr)) + for opt_label, require_arg in options: + # append '=' to options which require args + if require_arg: + opt_label += "=" + print(opt_label) + # Exit code of the bash completion function is never passed back to + # the user, so it's safe to always exit with 0. + # For more details see #25420. + sys.exit(0) + + def execute(self): + """ + Given the command-line arguments, figure out which subcommand is being + run, create a parser appropriate to that command, and run it. + """ + try: + subcommand = self.argv[1] + except IndexError: + subcommand = "help" # Display help if no arguments were given. + + # Preprocess options to extract --settings and --pythonpath. + # These options could affect the commands that are available, so they + # must be processed early. + parser = CommandParser( + prog=self.prog_name, + usage="%(prog)s subcommand [options] [args]", + add_help=False, + allow_abbrev=False, + ) + parser.add_argument("--settings") + parser.add_argument("--pythonpath") + parser.add_argument("args", nargs="*") # catch-all + try: + options, args = parser.parse_known_args(self.argv[2:]) + handle_default_options(options) + except CommandError: + pass # Ignore any option errors at this point. + + try: + settings.INSTALLED_APPS + except ImproperlyConfigured as exc: + self.settings_exception = exc + except ImportError as exc: + self.settings_exception = exc + + if settings.configured: + # Start the auto-reloading dev server even if the code is broken. + # The hardcoded condition is a code smell but we can't rely on a + # flag on the command class because we haven't located it yet. + if subcommand == "runserver" and "--noreload" not in self.argv: + try: + autoreload.check_errors(django.setup)() + except Exception: + # The exception will be raised later in the child process + # started by the autoreloader. Pretend it didn't happen by + # loading an empty list of applications. 
+                    apps.all_models = defaultdict(dict)
+                    apps.app_configs = {}
+                    apps.apps_ready = apps.models_ready = apps.ready = True
+
+                # Remove options not compatible with the built-in runserver
+                # (e.g. options for the contrib.staticfiles' runserver).
+                # Changes here require manually testing as described in
+                # #27522.
+                _parser = self.fetch_command("runserver").create_parser(
+                    "django", "runserver"
+                )
+                _options, _args = _parser.parse_known_args(self.argv[2:])
+                for _arg in _args:
+                    self.argv.remove(_arg)
+
+            # In all other cases, django.setup() is required to succeed.
+            else:
+                django.setup()
+
+        self.autocomplete()
+
+        if subcommand == "help":
+            if "--commands" in args:
+                sys.stdout.write(self.main_help_text(commands_only=True) + "\n")
+            elif not options.args:
+                sys.stdout.write(self.main_help_text() + "\n")
+            else:
+                self.fetch_command(options.args[0]).print_help(
+                    self.prog_name, options.args[0]
+                )
+        # Special-cases: We want 'django-admin --version' and
+        # 'django-admin --help' to work, for backwards compatibility.
+        elif subcommand == "version" or self.argv[1:] == ["--version"]:
+            sys.stdout.write(django.get_version() + "\n")
+        elif self.argv[1:] in (["--help"], ["-h"]):
+            sys.stdout.write(self.main_help_text() + "\n")
+        else:
+            self.fetch_command(subcommand).run_from_argv(self.argv)
+
+
+def execute_from_command_line(argv=None):
+    """Run a ManagementUtility."""
+    utility = ManagementUtility(argv)
+    utility.execute()
diff --git a/testbed/django__django/django/core/management/base.py b/testbed/django__django/django/core/management/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..631c761c004a30c4a635283131016ccd8fac0a25
--- /dev/null
+++ b/testbed/django__django/django/core/management/base.py
@@ -0,0 +1,688 @@
+"""
+Base classes for writing management commands (named commands which can
+be executed through ``django-admin`` or ``manage.py``).
+"""
+import argparse
+import os
+import sys
+from argparse import ArgumentParser, HelpFormatter
+from functools import partial
+from io import TextIOBase
+
+import django
+from django.core import checks
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management.color import color_style, no_style
+from django.db import DEFAULT_DB_ALIAS, connections
+
+ALL_CHECKS = "__all__"
+
+
+class CommandError(Exception):
+    """
+    Exception class indicating a problem while executing a management
+    command.
+
+    If this exception is raised during the execution of a management
+    command, it will be caught and turned into a nicely-printed error
+    message to the appropriate output stream (i.e., stderr); as a
+    result, raising this exception (with a sensible description of the
+    error) is the preferred way to indicate that something has gone
+    wrong in the execution of a command.
+    """
+
+    def __init__(self, *args, returncode=1, **kwargs):
+        self.returncode = returncode
+        super().__init__(*args, **kwargs)
+
+
+class SystemCheckError(CommandError):
+    """
+    The system check framework detected unrecoverable errors.
+    """
+
+    pass
+
+
+class CommandParser(ArgumentParser):
+    """
+    Customized ArgumentParser class to improve some error messages and prevent
+    SystemExit on several occasions, as SystemExit is unacceptable when a
+    command is called programmatically.
+ """ + + def __init__( + self, *, missing_args_message=None, called_from_command_line=None, **kwargs + ): + self.missing_args_message = missing_args_message + self.called_from_command_line = called_from_command_line + super().__init__(**kwargs) + + def parse_args(self, args=None, namespace=None): + # Catch missing argument for a better error message + if self.missing_args_message and not ( + args or any(not arg.startswith("-") for arg in args) + ): + self.error(self.missing_args_message) + return super().parse_args(args, namespace) + + def error(self, message): + if self.called_from_command_line: + super().error(message) + else: + raise CommandError("Error: %s" % message) + + def add_subparsers(self, **kwargs): + parser_class = kwargs.get("parser_class", type(self)) + if issubclass(parser_class, CommandParser): + kwargs["parser_class"] = partial( + parser_class, + called_from_command_line=self.called_from_command_line, + ) + return super().add_subparsers(**kwargs) + + +def handle_default_options(options): + """ + Include any default options that all commands should accept here + so that ManagementUtility can handle them before searching for + user commands. + """ + if options.settings: + os.environ["DJANGO_SETTINGS_MODULE"] = options.settings + if options.pythonpath: + sys.path.insert(0, options.pythonpath) + + +def no_translations(handle_func): + """Decorator that forces a command to run with translations deactivated.""" + + def wrapper(*args, **kwargs): + from django.utils import translation + + saved_locale = translation.get_language() + translation.deactivate_all() + try: + res = handle_func(*args, **kwargs) + finally: + if saved_locale is not None: + translation.activate(saved_locale) + return res + + return wrapper + + +class DjangoHelpFormatter(HelpFormatter): + """ + Customized formatter so that command-specific arguments appear in the + --help output before arguments common to all commands. + """ + + show_last = { + "--version", + "--verbosity", + "--traceback", + "--settings", + "--pythonpath", + "--no-color", + "--force-color", + "--skip-checks", + } + + def _reordered_actions(self, actions): + return sorted( + actions, key=lambda a: set(a.option_strings) & self.show_last != set() + ) + + def add_usage(self, usage, actions, *args, **kwargs): + super().add_usage(usage, self._reordered_actions(actions), *args, **kwargs) + + def add_arguments(self, actions): + super().add_arguments(self._reordered_actions(actions)) + + +class OutputWrapper(TextIOBase): + """ + Wrapper around stdout/stderr + """ + + @property + def style_func(self): + return self._style_func + + @style_func.setter + def style_func(self, style_func): + if style_func and self.isatty(): + self._style_func = style_func + else: + self._style_func = lambda x: x + + def __init__(self, out, ending="\n"): + self._out = out + self.style_func = None + self.ending = ending + + def __getattr__(self, name): + return getattr(self._out, name) + + def flush(self): + if hasattr(self._out, "flush"): + self._out.flush() + + def isatty(self): + return hasattr(self._out, "isatty") and self._out.isatty() + + def write(self, msg="", style_func=None, ending=None): + ending = self.ending if ending is None else ending + if ending and not msg.endswith(ending): + msg += ending + style_func = style_func or self.style_func + self._out.write(style_func(msg)) + + +class BaseCommand: + """ + The base class from which all management commands ultimately + derive. 
+
+    Use this class if you want access to all of the mechanisms which
+    parse the command-line arguments and work out what code to call in
+    response; if you don't need to change any of that behavior,
+    consider using one of the subclasses defined in this file.
+
+    If you are interested in overriding/customizing various aspects of
+    the command-parsing and -execution behavior, the normal flow works
+    as follows:
+
+    1. ``django-admin`` or ``manage.py`` loads the command class
+       and calls its ``run_from_argv()`` method.
+
+    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
+       an ``ArgumentParser`` for the arguments, parses them, performs
+       any environment changes requested by options like
+       ``pythonpath``, and then calls the ``execute()`` method,
+       passing the parsed arguments.
+
+    3. The ``execute()`` method attempts to carry out the command by
+       calling the ``handle()`` method with the parsed arguments; any
+       output produced by ``handle()`` will be printed to standard
+       output and, if the command is intended to produce a block of
+       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
+
+    4. If ``handle()`` or ``execute()`` raised any exception (e.g.
+       ``CommandError``), ``run_from_argv()`` will instead print an error
+       message to ``stderr``.
+
+    Thus, the ``handle()`` method is typically the starting point for
+    subclasses; many built-in commands and command types either place
+    all of their logic in ``handle()``, or perform some additional
+    parsing work in ``handle()`` and then delegate from it to more
+    specialized methods as needed.
+
+    Several attributes affect behavior at various steps along the way:
+
+    ``help``
+        A short description of the command, which will be printed in
+        help messages.
+
+    ``output_transaction``
+        A boolean indicating whether the command outputs SQL
+        statements; if ``True``, the output will automatically be
+        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
+        ``False``.
+
+    ``requires_migrations_checks``
+        A boolean; if ``True``, the command prints a warning if the set of
+        migrations on disk don't match the migrations in the database.
+
+    ``requires_system_checks``
+        A list or tuple of tags, e.g. [Tags.staticfiles, Tags.models]. System
+        checks registered in the chosen tags will be checked for errors prior
+        to executing the command. The value '__all__' can be used to specify
+        that all system checks should be performed. Default value is '__all__'.
+
+        To validate an individual application's models
+        rather than all applications' models, call
+        ``self.check(app_configs)`` from ``handle()``, where ``app_configs``
+        is the list of application configurations provided by the
+        app registry.
+
+    ``stealth_options``
+        A tuple of any options the command uses which aren't defined by the
+        argument parser.
+    """
+
+    # Metadata about this command.
+    help = ""
+
+    # Configuration shortcuts that alter various logic.
+    _called_from_command_line = False
+    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
+    requires_migrations_checks = False
+    requires_system_checks = "__all__"
+    # Arguments, common to all commands, which aren't defined by the argument
+    # parser.
+    base_stealth_options = ("stderr", "stdout")
+    # Command-specific options not defined by the argument parser.
+ stealth_options = () + suppressed_base_arguments = set() + + def __init__(self, stdout=None, stderr=None, no_color=False, force_color=False): + self.stdout = OutputWrapper(stdout or sys.stdout) + self.stderr = OutputWrapper(stderr or sys.stderr) + if no_color and force_color: + raise CommandError("'no_color' and 'force_color' can't be used together.") + if no_color: + self.style = no_style() + else: + self.style = color_style(force_color) + self.stderr.style_func = self.style.ERROR + if ( + not isinstance(self.requires_system_checks, (list, tuple)) + and self.requires_system_checks != ALL_CHECKS + ): + raise TypeError("requires_system_checks must be a list or tuple.") + + def get_version(self): + """ + Return the Django version, which should be correct for all built-in + Django commands. User-supplied commands can override this method to + return their own version. + """ + return django.get_version() + + def create_parser(self, prog_name, subcommand, **kwargs): + """ + Create and return the ``ArgumentParser`` which will be used to + parse the arguments to this command. + """ + kwargs.setdefault("formatter_class", DjangoHelpFormatter) + parser = CommandParser( + prog="%s %s" % (os.path.basename(prog_name), subcommand), + description=self.help or None, + missing_args_message=getattr(self, "missing_args_message", None), + called_from_command_line=getattr(self, "_called_from_command_line", None), + **kwargs, + ) + self.add_base_argument( + parser, + "--version", + action="version", + version=self.get_version(), + help="Show program's version number and exit.", + ) + self.add_base_argument( + parser, + "-v", + "--verbosity", + default=1, + type=int, + choices=[0, 1, 2, 3], + help=( + "Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, " + "3=very verbose output" + ), + ) + self.add_base_argument( + parser, + "--settings", + help=( + "The Python path to a settings module, e.g. " + '"myproject.settings.main". If this isn\'t provided, the ' + "DJANGO_SETTINGS_MODULE environment variable will be used." + ), + ) + self.add_base_argument( + parser, + "--pythonpath", + help=( + "A directory to add to the Python path, e.g. " + '"/home/djangoprojects/myproject".' + ), + ) + self.add_base_argument( + parser, + "--traceback", + action="store_true", + help="Raise on CommandError exceptions.", + ) + self.add_base_argument( + parser, + "--no-color", + action="store_true", + help="Don't colorize the command output.", + ) + self.add_base_argument( + parser, + "--force-color", + action="store_true", + help="Force colorization of the command output.", + ) + if self.requires_system_checks: + parser.add_argument( + "--skip-checks", + action="store_true", + help="Skip system checks.", + ) + self.add_arguments(parser) + return parser + + def add_arguments(self, parser): + """ + Entry point for subclassed commands to add custom arguments. + """ + pass + + def add_base_argument(self, parser, *args, **kwargs): + """ + Call the parser's add_argument() method, suppressing the help text + according to BaseCommand.suppressed_base_arguments. + """ + for arg in args: + if arg in self.suppressed_base_arguments: + kwargs["help"] = argparse.SUPPRESS + break + parser.add_argument(*args, **kwargs) + + def print_help(self, prog_name, subcommand): + """ + Print the help message for this command, derived from + ``self.usage()``. 
+ """ + parser = self.create_parser(prog_name, subcommand) + parser.print_help() + + def run_from_argv(self, argv): + """ + Set up any environment changes requested (e.g., Python path + and Django settings), then run this command. If the + command raises a ``CommandError``, intercept it and print it sensibly + to stderr. If the ``--traceback`` option is present or the raised + ``Exception`` is not ``CommandError``, raise it. + """ + self._called_from_command_line = True + parser = self.create_parser(argv[0], argv[1]) + + options = parser.parse_args(argv[2:]) + cmd_options = vars(options) + # Move positional args out of options to mimic legacy optparse + args = cmd_options.pop("args", ()) + handle_default_options(options) + try: + self.execute(*args, **cmd_options) + except CommandError as e: + if options.traceback: + raise + + # SystemCheckError takes care of its own formatting. + if isinstance(e, SystemCheckError): + self.stderr.write(str(e), lambda x: x) + else: + self.stderr.write("%s: %s" % (e.__class__.__name__, e)) + sys.exit(e.returncode) + finally: + try: + connections.close_all() + except ImproperlyConfigured: + # Ignore if connections aren't setup at this point (e.g. no + # configured settings). + pass + + def execute(self, *args, **options): + """ + Try to execute this command, performing system checks if needed (as + controlled by the ``requires_system_checks`` attribute, except if + force-skipped). + """ + if options["force_color"] and options["no_color"]: + raise CommandError( + "The --no-color and --force-color options can't be used together." + ) + if options["force_color"]: + self.style = color_style(force_color=True) + elif options["no_color"]: + self.style = no_style() + self.stderr.style_func = None + if options.get("stdout"): + self.stdout = OutputWrapper(options["stdout"]) + if options.get("stderr"): + self.stderr = OutputWrapper(options["stderr"]) + + if self.requires_system_checks and not options["skip_checks"]: + if self.requires_system_checks == ALL_CHECKS: + self.check() + else: + self.check(tags=self.requires_system_checks) + if self.requires_migrations_checks: + self.check_migrations() + output = self.handle(*args, **options) + if output: + if self.output_transaction: + connection = connections[options.get("database", DEFAULT_DB_ALIAS)] + output = "%s\n%s\n%s" % ( + self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()), + output, + self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()), + ) + self.stdout.write(output) + return output + + def check( + self, + app_configs=None, + tags=None, + display_num_errors=False, + include_deployment_checks=False, + fail_level=checks.ERROR, + databases=None, + ): + """ + Use the system check framework to validate entire Django project. + Raise CommandError for any serious message (error or critical errors). + If there are only light messages (like warnings), print them to stderr + and don't raise an exception. 
+ """ + all_issues = checks.run_checks( + app_configs=app_configs, + tags=tags, + include_deployment_checks=include_deployment_checks, + databases=databases, + ) + + header, body, footer = "", "", "" + visible_issue_count = 0 # excludes silenced warnings + + if all_issues: + debugs = [ + e for e in all_issues if e.level < checks.INFO and not e.is_silenced() + ] + infos = [ + e + for e in all_issues + if checks.INFO <= e.level < checks.WARNING and not e.is_silenced() + ] + warnings = [ + e + for e in all_issues + if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced() + ] + errors = [ + e + for e in all_issues + if checks.ERROR <= e.level < checks.CRITICAL and not e.is_silenced() + ] + criticals = [ + e + for e in all_issues + if checks.CRITICAL <= e.level and not e.is_silenced() + ] + sorted_issues = [ + (criticals, "CRITICALS"), + (errors, "ERRORS"), + (warnings, "WARNINGS"), + (infos, "INFOS"), + (debugs, "DEBUGS"), + ] + + for issues, group_name in sorted_issues: + if issues: + visible_issue_count += len(issues) + formatted = ( + self.style.ERROR(str(e)) + if e.is_serious() + else self.style.WARNING(str(e)) + for e in issues + ) + formatted = "\n".join(sorted(formatted)) + body += "\n%s:\n%s\n" % (group_name, formatted) + + if visible_issue_count: + header = "System check identified some issues:\n" + + if display_num_errors: + if visible_issue_count: + footer += "\n" + footer += "System check identified %s (%s silenced)." % ( + "no issues" + if visible_issue_count == 0 + else "1 issue" + if visible_issue_count == 1 + else "%s issues" % visible_issue_count, + len(all_issues) - visible_issue_count, + ) + + if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues): + msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer + raise SystemCheckError(msg) + else: + msg = header + body + footer + + if msg: + if visible_issue_count: + self.stderr.write(msg, lambda x: x) + else: + self.stdout.write(msg) + + def check_migrations(self): + """ + Print a warning if the set of migrations on disk don't match the + migrations in the database. + """ + from django.db.migrations.executor import MigrationExecutor + + try: + executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS]) + except ImproperlyConfigured: + # No databases are configured (or the dummy one) + return + + plan = executor.migration_plan(executor.loader.graph.leaf_nodes()) + if plan: + apps_waiting_migration = sorted( + {migration.app_label for migration, backwards in plan} + ) + self.stdout.write( + self.style.NOTICE( + "\nYou have %(unapplied_migration_count)s unapplied migration(s). " + "Your project may not work properly until you apply the " + "migrations for app(s): %(apps_waiting_migration)s." + % { + "unapplied_migration_count": len(plan), + "apps_waiting_migration": ", ".join(apps_waiting_migration), + } + ) + ) + self.stdout.write( + self.style.NOTICE("Run 'python manage.py migrate' to apply them.") + ) + + def handle(self, *args, **options): + """ + The actual logic of the command. Subclasses must implement + this method. + """ + raise NotImplementedError( + "subclasses of BaseCommand must provide a handle() method" + ) + + +class AppCommand(BaseCommand): + """ + A management command which takes one or more installed application labels + as arguments, and does something with each of them. + + Rather than implementing ``handle()``, subclasses must implement + ``handle_app_config()``, which will be called once for each application. 
+ """ + + missing_args_message = "Enter at least one application label." + + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="app_label", + nargs="+", + help="One or more application label.", + ) + + def handle(self, *app_labels, **options): + from django.apps import apps + + try: + app_configs = [apps.get_app_config(app_label) for app_label in app_labels] + except (LookupError, ImportError) as e: + raise CommandError( + "%s. Are you sure your INSTALLED_APPS setting is correct?" % e + ) + output = [] + for app_config in app_configs: + app_output = self.handle_app_config(app_config, **options) + if app_output: + output.append(app_output) + return "\n".join(output) + + def handle_app_config(self, app_config, **options): + """ + Perform the command's actions for app_config, an AppConfig instance + corresponding to an application label given on the command line. + """ + raise NotImplementedError( + "Subclasses of AppCommand must provide a handle_app_config() method." + ) + + +class LabelCommand(BaseCommand): + """ + A management command which takes one or more arbitrary arguments + (labels) on the command line, and does something with each of + them. + + Rather than implementing ``handle()``, subclasses must implement + ``handle_label()``, which will be called once for each label. + + If the arguments should be names of installed applications, use + ``AppCommand`` instead. + """ + + label = "label" + missing_args_message = "Enter at least one %s." % label + + def add_arguments(self, parser): + parser.add_argument("args", metavar=self.label, nargs="+") + + def handle(self, *labels, **options): + output = [] + for label in labels: + label_output = self.handle_label(label, **options) + if label_output: + output.append(label_output) + return "\n".join(output) + + def handle_label(self, label, **options): + """ + Perform the command's actions for ``label``, which will be the + string as given on the command line. + """ + raise NotImplementedError( + "subclasses of LabelCommand must provide a handle_label() method" + ) diff --git a/testbed/django__django/django/core/management/color.py b/testbed/django__django/django/core/management/color.py new file mode 100644 index 0000000000000000000000000000000000000000..229e9b4e4ab5e53c3218439d4337df9f84e52fa9 --- /dev/null +++ b/testbed/django__django/django/core/management/color.py @@ -0,0 +1,113 @@ +""" +Sets up the terminal color scheme. +""" + +import functools +import os +import sys + +from django.utils import termcolors + +try: + import colorama + + colorama.init() +except (ImportError, OSError): + HAS_COLORAMA = False +else: + HAS_COLORAMA = True + + +def supports_color(): + """ + Return True if the running system's terminal supports color, + and False otherwise. + """ + + def vt_codes_enabled_in_windows_registry(): + """ + Check the Windows Registry to see if VT code handling has been enabled + by default, see https://superuser.com/a/1300251/447564. + """ + try: + # winreg is only available on Windows. + import winreg + except ImportError: + return False + else: + try: + reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Console") + reg_key_value, _ = winreg.QueryValueEx(reg_key, "VirtualTerminalLevel") + except FileNotFoundError: + return False + else: + return reg_key_value == 1 + + # isatty is not always implemented, #6223. 
+ is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() + + return is_a_tty and ( + sys.platform != "win32" + or HAS_COLORAMA + or "ANSICON" in os.environ + or + # Windows Terminal supports VT codes. + "WT_SESSION" in os.environ + or + # Microsoft Visual Studio Code's built-in terminal supports colors. + os.environ.get("TERM_PROGRAM") == "vscode" + or vt_codes_enabled_in_windows_registry() + ) + + +class Style: + pass + + +def make_style(config_string=""): + """ + Create a Style object from the given config_string. + + If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used. + """ + + style = Style() + + color_settings = termcolors.parse_color_setting(config_string) + + # The nocolor palette has all available roles. + # Use that palette as the basis for populating + # the palette as defined in the environment. + for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]: + if color_settings: + format = color_settings.get(role, {}) + style_func = termcolors.make_style(**format) + else: + + def style_func(x): + return x + + setattr(style, role, style_func) + + # For backwards compatibility, + # set style for ERROR_OUTPUT == ERROR + style.ERROR_OUTPUT = style.ERROR + + return style + + +@functools.cache +def no_style(): + """ + Return a Style object with no color scheme. + """ + return make_style("nocolor") + + +def color_style(force_color=False): + """ + Return a Style object from the Django color scheme. + """ + if not force_color and not supports_color(): + return no_style() + return make_style(os.environ.get("DJANGO_COLORS", "")) diff --git a/testbed/django__django/django/core/management/commands/__init__.py b/testbed/django__django/django/core/management/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/core/management/commands/check.py b/testbed/django__django/django/core/management/commands/check.py new file mode 100644 index 0000000000000000000000000000000000000000..7624b853909d84574520988d61e7777f3f55fdb2 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/check.py @@ -0,0 +1,83 @@ +from django.apps import apps +from django.core import checks +from django.core.checks.registry import registry +from django.core.management.base import BaseCommand, CommandError + + +class Command(BaseCommand): + help = "Checks the entire Django project for potential problems." + + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument("args", metavar="app_label", nargs="*") + parser.add_argument( + "--tag", + "-t", + action="append", + dest="tags", + help="Run only checks labeled with given tag.", + ) + parser.add_argument( + "--list-tags", + action="store_true", + help="List available tags.", + ) + parser.add_argument( + "--deploy", + action="store_true", + help="Check deployment settings.", + ) + parser.add_argument( + "--fail-level", + default="ERROR", + choices=["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"], + help=( + "Message level that will cause the command to exit with a " + "non-zero status. Default is ERROR." 
+ ), + ) + parser.add_argument( + "--database", + action="append", + dest="databases", + help="Run database related checks against these aliases.", + ) + + def handle(self, *app_labels, **options): + include_deployment_checks = options["deploy"] + if options["list_tags"]: + self.stdout.write( + "\n".join(sorted(registry.tags_available(include_deployment_checks))) + ) + return + + if app_labels: + app_configs = [apps.get_app_config(app_label) for app_label in app_labels] + else: + app_configs = None + + tags = options["tags"] + if tags: + try: + invalid_tag = next( + tag + for tag in tags + if not checks.tag_exists(tag, include_deployment_checks) + ) + except StopIteration: + # no invalid tags + pass + else: + raise CommandError( + 'There is no system check with the "%s" tag.' % invalid_tag + ) + + self.check( + app_configs=app_configs, + tags=tags, + display_num_errors=True, + include_deployment_checks=include_deployment_checks, + fail_level=getattr(checks, options["fail_level"]), + databases=options["databases"], + ) diff --git a/testbed/django__django/django/core/management/commands/compilemessages.py b/testbed/django__django/django/core/management/commands/compilemessages.py new file mode 100644 index 0000000000000000000000000000000000000000..9ed3ef7c31e78ab70b7a416ece8fec11ede71c8f --- /dev/null +++ b/testbed/django__django/django/core/management/commands/compilemessages.py @@ -0,0 +1,195 @@ +import codecs +import concurrent.futures +import glob +import os +from pathlib import Path + +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import find_command, is_ignored_path, popen_wrapper + + +def has_bom(fn): + with fn.open("rb") as f: + sample = f.read(4) + return sample.startswith( + (codecs.BOM_UTF8, codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE) + ) + + +def is_writable(path): + # Known side effect: updating file access/modified time to current time if + # it is writable. + try: + with open(path, "a"): + os.utime(path, None) + except OSError: + return False + return True + + +class Command(BaseCommand): + help = "Compiles .po files to .mo files for use with builtin gettext support." + + requires_system_checks = [] + + program = "msgfmt" + program_options = ["--check-format"] + + def add_arguments(self, parser): + parser.add_argument( + "--locale", + "-l", + action="append", + default=[], + help="Locale(s) to process (e.g. de_AT). Default is to process all. " + "Can be used multiple times.", + ) + parser.add_argument( + "--exclude", + "-x", + action="append", + default=[], + help="Locales to exclude. Default is none. Can be used multiple times.", + ) + parser.add_argument( + "--use-fuzzy", + "-f", + dest="fuzzy", + action="store_true", + help="Use fuzzy translations.", + ) + parser.add_argument( + "--ignore", + "-i", + action="append", + dest="ignore_patterns", + default=[], + metavar="PATTERN", + help="Ignore directories matching this glob-style pattern. " + "Use multiple times to ignore more.", + ) + + def handle(self, **options): + locale = options["locale"] + exclude = options["exclude"] + ignore_patterns = set(options["ignore_patterns"]) + self.verbosity = options["verbosity"] + if options["fuzzy"]: + self.program_options = self.program_options + ["-f"] + + if find_command(self.program) is None: + raise CommandError( + "Can't find %s. Make sure you have GNU gettext " + "tools 0.15 or newer installed." 
% self.program + ) + + basedirs = [os.path.join("conf", "locale"), "locale"] + if os.environ.get("DJANGO_SETTINGS_MODULE"): + from django.conf import settings + + basedirs.extend(settings.LOCALE_PATHS) + + # Walk entire tree, looking for locale directories + for dirpath, dirnames, filenames in os.walk(".", topdown=True): + for dirname in dirnames: + if is_ignored_path( + os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns + ): + dirnames.remove(dirname) + elif dirname == "locale": + basedirs.append(os.path.join(dirpath, dirname)) + + # Gather existing directories. + basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs))) + + if not basedirs: + raise CommandError( + "This script should be run from the Django Git " + "checkout or your project or app tree, or with " + "the settings module specified." + ) + + # Build locale list + all_locales = [] + for basedir in basedirs: + locale_dirs = filter(os.path.isdir, glob.glob("%s/*" % basedir)) + all_locales.extend(map(os.path.basename, locale_dirs)) + + # Account for excluded locales + locales = locale or all_locales + locales = set(locales).difference(exclude) + + self.has_errors = False + for basedir in basedirs: + if locales: + dirs = [ + os.path.join(basedir, locale, "LC_MESSAGES") for locale in locales + ] + else: + dirs = [basedir] + locations = [] + for ldir in dirs: + for dirpath, dirnames, filenames in os.walk(ldir): + locations.extend( + (dirpath, f) for f in filenames if f.endswith(".po") + ) + if locations: + self.compile_messages(locations) + + if self.has_errors: + raise CommandError("compilemessages generated one or more errors.") + + def compile_messages(self, locations): + """ + Locations is a list of tuples: [(directory, file), ...] + """ + with concurrent.futures.ThreadPoolExecutor() as executor: + futures = [] + for i, (dirpath, f) in enumerate(locations): + po_path = Path(dirpath) / f + mo_path = po_path.with_suffix(".mo") + try: + if mo_path.stat().st_mtime >= po_path.stat().st_mtime: + if self.verbosity > 0: + self.stdout.write( + "File “%s” is already compiled and up to date." + % po_path + ) + continue + except FileNotFoundError: + pass + if self.verbosity > 0: + self.stdout.write("processing file %s in %s" % (f, dirpath)) + + if has_bom(po_path): + self.stderr.write( + "The %s file has a BOM (Byte Order Mark). Django only " + "supports .po files encoded in UTF-8 and without any BOM." + % po_path + ) + self.has_errors = True + continue + + # Check writability on first location + if i == 0 and not is_writable(mo_path): + self.stderr.write( + "The po files under %s are in a seemingly not writable " + "location. mo files will not be updated/created." 
% dirpath + ) + self.has_errors = True + return + + args = [self.program, *self.program_options, "-o", mo_path, po_path] + futures.append(executor.submit(popen_wrapper, args)) + + for future in concurrent.futures.as_completed(futures): + output, errors, status = future.result() + if status: + if self.verbosity > 0: + if errors: + self.stderr.write( + "Execution of %s failed: %s" % (self.program, errors) + ) + else: + self.stderr.write("Execution of %s failed" % self.program) + self.has_errors = True diff --git a/testbed/django__django/django/core/management/commands/createcachetable.py b/testbed/django__django/django/core/management/commands/createcachetable.py new file mode 100644 index 0000000000000000000000000000000000000000..65ed1686d20782a0041af4d6b959ee5c11388cc7 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/createcachetable.py @@ -0,0 +1,130 @@ +from django.conf import settings +from django.core.cache import caches +from django.core.cache.backends.db import BaseDatabaseCache +from django.core.management.base import BaseCommand, CommandError +from django.db import ( + DEFAULT_DB_ALIAS, + DatabaseError, + connections, + models, + router, + transaction, +) + + +class Command(BaseCommand): + help = "Creates the tables needed to use the SQL cache backend." + + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="table_name", + nargs="*", + help=( + "Optional table names. Otherwise, settings.CACHES is used to find " + "cache tables." + ), + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help="Nominates a database onto which the cache tables will be " + 'installed. Defaults to the "default" database.', + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Does not create the table, just prints the SQL that would be run.", + ) + + def handle(self, *tablenames, **options): + db = options["database"] + self.verbosity = options["verbosity"] + dry_run = options["dry_run"] + if tablenames: + # Legacy behavior, tablename specified as argument + for tablename in tablenames: + self.create_table(db, tablename, dry_run) + else: + for cache_alias in settings.CACHES: + cache = caches[cache_alias] + if isinstance(cache, BaseDatabaseCache): + self.create_table(db, cache._table, dry_run) + + def create_table(self, database, tablename, dry_run): + cache = BaseDatabaseCache(tablename, {}) + if not router.allow_migrate_model(database, cache.cache_model_class): + return + connection = connections[database] + + if tablename in connection.introspection.table_names(): + if self.verbosity > 0: + self.stdout.write("Cache table '%s' already exists." % tablename) + return + + fields = ( + # "key" is a reserved word in MySQL, so use "cache_key" instead. 
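+ # Editor's note: on SQLite, for instance, the DDL assembled below comes
+ # out roughly as (column types are backend-specific):
+ #
+ #     CREATE TABLE "my_cache" (
+ #         "cache_key" varchar(255) NOT NULL PRIMARY KEY,
+ #         "value" text NOT NULL,
+ #         "expires" datetime NOT NULL
+ #     );
+ #     CREATE INDEX "my_cache_expires" ON "my_cache" ("expires");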
+ models.CharField( + name="cache_key", max_length=255, unique=True, primary_key=True + ), + models.TextField(name="value"), + models.DateTimeField(name="expires", db_index=True), + ) + table_output = [] + index_output = [] + qn = connection.ops.quote_name + for f in fields: + field_output = [ + qn(f.name), + f.db_type(connection=connection), + "%sNULL" % ("NOT " if not f.null else ""), + ] + if f.primary_key: + field_output.append("PRIMARY KEY") + elif f.unique: + field_output.append("UNIQUE") + if f.db_index: + unique = "UNIQUE " if f.unique else "" + index_output.append( + "CREATE %sINDEX %s ON %s (%s);" + % ( + unique, + qn("%s_%s" % (tablename, f.name)), + qn(tablename), + qn(f.name), + ) + ) + table_output.append(" ".join(field_output)) + full_statement = ["CREATE TABLE %s (" % qn(tablename)] + for i, line in enumerate(table_output): + full_statement.append( + " %s%s" % (line, "," if i < len(table_output) - 1 else "") + ) + full_statement.append(");") + + full_statement = "\n".join(full_statement) + + if dry_run: + self.stdout.write(full_statement) + for statement in index_output: + self.stdout.write(statement) + return + + with transaction.atomic( + using=database, savepoint=connection.features.can_rollback_ddl + ): + with connection.cursor() as curs: + try: + curs.execute(full_statement) + except DatabaseError as e: + raise CommandError( + "Cache table '%s' could not be created.\nThe error was: %s." + % (tablename, e) + ) + for statement in index_output: + curs.execute(statement) + + if self.verbosity > 1: + self.stdout.write("Cache table '%s' created." % tablename) diff --git a/testbed/django__django/django/core/management/commands/diffsettings.py b/testbed/django__django/django/core/management/commands/diffsettings.py new file mode 100644 index 0000000000000000000000000000000000000000..047e4764a8c65b2db454dc9cb7b06c6ec43a85b1 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/diffsettings.py @@ -0,0 +1,91 @@ +from django.core.management.base import BaseCommand + + +def module_to_dict(module, omittable=lambda k: k.startswith("_") or not k.isupper()): + """Convert a module namespace to a Python dictionary.""" + return {k: repr(getattr(module, k)) for k in dir(module) if not omittable(k)} + + +class Command(BaseCommand): + help = """Displays differences between the current settings.py and Django's + default settings.""" + + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument( + "--all", + action="store_true", + help=( + 'Display all settings, regardless of their value. In "hash" ' + 'mode, default values are prefixed by "###".' + ), + ) + parser.add_argument( + "--default", + metavar="MODULE", + help=( + "The settings module to compare the current settings against. Leave " + "empty to compare against Django's default settings." + ), + ) + parser.add_argument( + "--output", + default="hash", + choices=("hash", "unified"), + help=( + "Selects the output format. 'hash' mode displays each changed " + "setting, with the settings that don't appear in the defaults " + "followed by ###. 'unified' mode prefixes the default setting " + "with a minus sign, followed by the changed setting prefixed " + "with a plus sign." + ), + ) + + def handle(self, **options): + from django.conf import Settings, global_settings, settings + + # Because settings are imported lazily, we need to explicitly load them. 
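+ # Editor's note: a sketch of the laziness in question -- any attribute
+ # access configures the wrapped settings implicitly:
+ #
+ #     from django.conf import settings
+ #     settings.DEBUG        # first access triggers settings._setup()
+ #
+ # The explicit call below makes that happen up front.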
+ if not settings.configured: + settings._setup() + + user_settings = module_to_dict(settings._wrapped) + default = options["default"] + default_settings = module_to_dict( + Settings(default) if default else global_settings + ) + output_func = { + "hash": self.output_hash, + "unified": self.output_unified, + }[options["output"]] + return "\n".join(output_func(user_settings, default_settings, **options)) + + def output_hash(self, user_settings, default_settings, **options): + # Inspired by Postfix's "postconf -n". + output = [] + for key in sorted(user_settings): + if key not in default_settings: + output.append("%s = %s ###" % (key, user_settings[key])) + elif user_settings[key] != default_settings[key]: + output.append("%s = %s" % (key, user_settings[key])) + elif options["all"]: + output.append("### %s = %s" % (key, user_settings[key])) + return output + + def output_unified(self, user_settings, default_settings, **options): + output = [] + for key in sorted(user_settings): + if key not in default_settings: + output.append( + self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])) + ) + elif user_settings[key] != default_settings[key]: + output.append( + self.style.ERROR("- %s = %s" % (key, default_settings[key])) + ) + output.append( + self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])) + ) + elif options["all"]: + output.append(" %s = %s" % (key, user_settings[key])) + return output diff --git a/testbed/django__django/django/core/management/commands/dumpdata.py b/testbed/django__django/django/core/management/commands/dumpdata.py new file mode 100644 index 0000000000000000000000000000000000000000..cc183517e3022b3a9a10025fa856c8c338b9378c --- /dev/null +++ b/testbed/django__django/django/core/management/commands/dumpdata.py @@ -0,0 +1,281 @@ +import gzip +import os +import warnings + +from django.apps import apps +from django.core import serializers +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import parse_apps_and_model_labels +from django.db import DEFAULT_DB_ALIAS, router + +try: + import bz2 + + has_bz2 = True +except ImportError: + has_bz2 = False + +try: + import lzma + + has_lzma = True +except ImportError: + has_lzma = False + + +class ProxyModelWarning(Warning): + pass + + +class Command(BaseCommand): + help = ( + "Output the contents of the database as a fixture of the given format " + "(using each model's default manager unless --all is specified)." + ) + + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="app_label[.ModelName]", + nargs="*", + help=( + "Restricts dumped data to the specified app_label or " + "app_label.ModelName." + ), + ) + parser.add_argument( + "--format", + default="json", + help="Specifies the output serialization format for fixtures.", + ) + parser.add_argument( + "--indent", + type=int, + help="Specifies the indent level to use when pretty-printing output.", + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help="Nominates a specific database to dump fixtures from. 
" + 'Defaults to the "default" database.', + ) + parser.add_argument( + "-e", + "--exclude", + action="append", + default=[], + help="An app_label or app_label.ModelName to exclude " + "(use multiple --exclude to exclude multiple apps/models).", + ) + parser.add_argument( + "--natural-foreign", + action="store_true", + dest="use_natural_foreign_keys", + help="Use natural foreign keys if they are available.", + ) + parser.add_argument( + "--natural-primary", + action="store_true", + dest="use_natural_primary_keys", + help="Use natural primary keys if they are available.", + ) + parser.add_argument( + "-a", + "--all", + action="store_true", + dest="use_base_manager", + help=( + "Use Django's base manager to dump all models stored in the database, " + "including those that would otherwise be filtered or modified by a " + "custom manager." + ), + ) + parser.add_argument( + "--pks", + dest="primary_keys", + help="Only dump objects with given primary keys. Accepts a comma-separated " + "list of keys. This option only works when you specify one model.", + ) + parser.add_argument( + "-o", "--output", help="Specifies file to which the output is written." + ) + + def handle(self, *app_labels, **options): + format = options["format"] + indent = options["indent"] + using = options["database"] + excludes = options["exclude"] + output = options["output"] + show_traceback = options["traceback"] + use_natural_foreign_keys = options["use_natural_foreign_keys"] + use_natural_primary_keys = options["use_natural_primary_keys"] + use_base_manager = options["use_base_manager"] + pks = options["primary_keys"] + + if pks: + primary_keys = [pk.strip() for pk in pks.split(",")] + else: + primary_keys = [] + + excluded_models, excluded_apps = parse_apps_and_model_labels(excludes) + + if not app_labels: + if primary_keys: + raise CommandError("You can only use --pks option with one model") + app_list = dict.fromkeys( + app_config + for app_config in apps.get_app_configs() + if app_config.models_module is not None + and app_config not in excluded_apps + ) + else: + if len(app_labels) > 1 and primary_keys: + raise CommandError("You can only use --pks option with one model") + app_list = {} + for label in app_labels: + try: + app_label, model_label = label.split(".") + try: + app_config = apps.get_app_config(app_label) + except LookupError as e: + raise CommandError(str(e)) + if app_config.models_module is None or app_config in excluded_apps: + continue + try: + model = app_config.get_model(model_label) + except LookupError: + raise CommandError( + "Unknown model: %s.%s" % (app_label, model_label) + ) + + app_list_value = app_list.setdefault(app_config, []) + + # We may have previously seen an "all-models" request for + # this app (no model qualifier was given). In this case + # there is no need adding specific models to the list. + if app_list_value is not None and model not in app_list_value: + app_list_value.append(model) + except ValueError: + if primary_keys: + raise CommandError( + "You can only use --pks option with one model" + ) + # This is just an app - no model qualifier + app_label = label + try: + app_config = apps.get_app_config(app_label) + except LookupError as e: + raise CommandError(str(e)) + if app_config.models_module is None or app_config in excluded_apps: + continue + app_list[app_config] = None + + # Check that the serialization format exists; this is a shortcut to + # avoid collating all the objects and _then_ failing. 
+ if format not in serializers.get_public_serializer_formats(): + try: + serializers.get_serializer(format) + except serializers.SerializerDoesNotExist: + pass + + raise CommandError("Unknown serialization format: %s" % format) + + def get_objects(count_only=False): + """ + Collate the objects to be serialized. If count_only is True, just + count the number of objects to be serialized. + """ + if use_natural_foreign_keys: + models = serializers.sort_dependencies( + app_list.items(), allow_cycles=True + ) + else: + # There is no need to sort dependencies when natural foreign + # keys are not used. + models = [] + for app_config, model_list in app_list.items(): + if model_list is None: + models.extend(app_config.get_models()) + else: + models.extend(model_list) + for model in models: + if model in excluded_models: + continue + if model._meta.proxy and model._meta.proxy_for_model not in models: + warnings.warn( + "%s is a proxy model and won't be serialized." + % model._meta.label, + category=ProxyModelWarning, + ) + if not model._meta.proxy and router.allow_migrate_model(using, model): + if use_base_manager: + objects = model._base_manager + else: + objects = model._default_manager + + queryset = objects.using(using).order_by(model._meta.pk.name) + if primary_keys: + queryset = queryset.filter(pk__in=primary_keys) + if count_only: + yield queryset.order_by().count() + else: + yield from queryset.iterator() + + try: + self.stdout.ending = None + progress_output = None + object_count = 0 + # If dumpdata is outputting to stdout, there is no way to display progress + if output and self.stdout.isatty() and options["verbosity"] > 0: + progress_output = self.stdout + object_count = sum(get_objects(count_only=True)) + if output: + file_root, file_ext = os.path.splitext(output) + compression_formats = { + ".bz2": (open, {}, file_root), + ".gz": (gzip.open, {}, output), + ".lzma": (open, {}, file_root), + ".xz": (open, {}, file_root), + ".zip": (open, {}, file_root), + } + if has_bz2: + compression_formats[".bz2"] = (bz2.open, {}, output) + if has_lzma: + compression_formats[".lzma"] = ( + lzma.open, + {"format": lzma.FORMAT_ALONE}, + output, + ) + compression_formats[".xz"] = (lzma.open, {}, output) + try: + open_method, kwargs, file_path = compression_formats[file_ext] + except KeyError: + open_method, kwargs, file_path = (open, {}, output) + if file_path != output: + file_name = os.path.basename(file_path) + warnings.warn( + f"Unsupported file extension ({file_ext}). 
" + f"Fixtures saved in '{file_name}'.", + RuntimeWarning, + ) + stream = open_method(file_path, "wt", **kwargs) + else: + stream = None + try: + serializers.serialize( + format, + get_objects(), + indent=indent, + use_natural_foreign_keys=use_natural_foreign_keys, + use_natural_primary_keys=use_natural_primary_keys, + stream=stream or self.stdout, + progress_output=progress_output, + object_count=object_count, + ) + finally: + if stream: + stream.close() + except Exception as e: + if show_traceback: + raise + raise CommandError("Unable to serialize database: %s" % e) diff --git a/testbed/django__django/django/core/management/commands/flush.py b/testbed/django__django/django/core/management/commands/flush.py new file mode 100644 index 0000000000000000000000000000000000000000..e9d440dd86511b4c07fb8da60455e966a1194e9d --- /dev/null +++ b/testbed/django__django/django/core/management/commands/flush.py @@ -0,0 +1,92 @@ +from importlib import import_module + +from django.apps import apps +from django.core.management.base import BaseCommand, CommandError +from django.core.management.color import no_style +from django.core.management.sql import emit_post_migrate_signal, sql_flush +from django.db import DEFAULT_DB_ALIAS, connections + + +class Command(BaseCommand): + help = ( + "Removes ALL DATA from the database, including data added during " + 'migrations. Does not achieve a "fresh install" state.' + ) + stealth_options = ("reset_sequences", "allow_cascade", "inhibit_post_migrate") + + def add_arguments(self, parser): + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help='Nominates a database to flush. Defaults to the "default" database.', + ) + + def handle(self, **options): + database = options["database"] + connection = connections[database] + verbosity = options["verbosity"] + interactive = options["interactive"] + # The following are stealth options used by Django's internals. + reset_sequences = options.get("reset_sequences", True) + allow_cascade = options.get("allow_cascade", False) + inhibit_post_migrate = options.get("inhibit_post_migrate", False) + + self.style = no_style() + + # Import the 'management' module within each installed app, to register + # dispatcher events. + for app_config in apps.get_app_configs(): + try: + import_module(".management", app_config.name) + except ImportError: + pass + + sql_list = sql_flush( + self.style, + connection, + reset_sequences=reset_sequences, + allow_cascade=allow_cascade, + ) + + if interactive: + confirm = input( + """You have requested a flush of the database. +This will IRREVERSIBLY DESTROY all data currently in the "%s" database, +and return each table to an empty state. +Are you sure you want to do this? + + Type 'yes' to continue, or 'no' to cancel: """ + % connection.settings_dict["NAME"] + ) + else: + confirm = "yes" + + if confirm == "yes": + try: + connection.ops.execute_sql_flush(sql_list) + except Exception as exc: + raise CommandError( + "Database %s couldn't be flushed. Possible reasons:\n" + " * The database isn't running or isn't configured correctly.\n" + " * At least one of the expected database tables doesn't exist.\n" + " * The SQL was invalid.\n" + "Hint: Look at the output of 'django-admin sqlflush'. " + "That's the SQL this command wasn't able to run." 
+ % (connection.settings_dict["NAME"],) + ) from exc + + # Empty sql_list may signify an empty database and post_migrate + # would then crash. + if sql_list and not inhibit_post_migrate: + # Emit the post migrate signal. This allows individual applications to + # respond as if the database had been migrated from scratch. + emit_post_migrate_signal(verbosity, interactive, database) + else: + self.stdout.write("Flush cancelled.") diff --git a/testbed/django__django/django/core/management/commands/inspectdb.py b/testbed/django__django/django/core/management/commands/inspectdb.py new file mode 100644 index 0000000000000000000000000000000000000000..5c2ed53db8f03a5c738d8d6021d1fcdd0c68fdb3 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/inspectdb.py @@ -0,0 +1,414 @@ +import keyword +import re + +from django.core.management.base import BaseCommand, CommandError +from django.db import DEFAULT_DB_ALIAS, connections +from django.db.models.constants import LOOKUP_SEP + + +class Command(BaseCommand): + help = ( + "Introspects the database tables in the given database and outputs a Django " + "model module." + ) + requires_system_checks = [] + stealth_options = ("table_name_filter",) + db_module = "django.db" + + def add_arguments(self, parser): + parser.add_argument( + "table", + nargs="*", + type=str, + help="Selects what tables or views should be introspected.", + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + 'Nominates a database to introspect. Defaults to using the "default" ' + "database." + ), + ) + parser.add_argument( + "--include-partitions", + action="store_true", + help="Also output models for partition tables.", + ) + parser.add_argument( + "--include-views", + action="store_true", + help="Also output models for database views.", + ) + + def handle(self, **options): + try: + for line in self.handle_inspection(options): + self.stdout.write(line) + except NotImplementedError: + raise CommandError( + "Database inspection isn't supported for the currently selected " + "database backend." + ) + + def handle_inspection(self, options): + connection = connections[options["database"]] + # 'table_name_filter' is a stealth option + table_name_filter = options.get("table_name_filter") + + with connection.cursor() as cursor: + yield "# This is an auto-generated Django model module." + yield "# You'll have to do the following manually to clean this up:" + yield "# * Rearrange models' order" + yield "# * Make sure each model has one field with primary_key=True" + yield ( + "# * Make sure each ForeignKey and OneToOneField has `on_delete` set " + "to the desired behavior" + ) + yield ( + "# * Remove `managed = False` lines if you wish to allow " + "Django to create, modify, and delete the table" + ) + yield ( + "# Feel free to rename the models, but don't rename db_table values or " + "field names." + ) + yield "from %s import models" % self.db_module + known_models = [] + # Determine types of tables and/or views to be introspected. 
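+ # Editor's note: "t", "p", and "v" are the TableInfo.type codes for
+ # plain tables, partitions, and views; an illustrative run covering all
+ # three:
+ #
+ #     python manage.py inspectdb --include-partitions --include-views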
+ types = {"t"} + if options["include_partitions"]: + types.add("p") + if options["include_views"]: + types.add("v") + table_info = connection.introspection.get_table_list(cursor) + table_info = {info.name: info for info in table_info if info.type in types} + + for table_name in options["table"] or sorted(name for name in table_info): + if table_name_filter is not None and callable(table_name_filter): + if not table_name_filter(table_name): + continue + try: + try: + relations = connection.introspection.get_relations( + cursor, table_name + ) + except NotImplementedError: + relations = {} + try: + constraints = connection.introspection.get_constraints( + cursor, table_name + ) + except NotImplementedError: + constraints = {} + primary_key_columns = ( + connection.introspection.get_primary_key_columns( + cursor, table_name + ) + ) + primary_key_column = ( + primary_key_columns[0] if primary_key_columns else None + ) + unique_columns = [ + c["columns"][0] + for c in constraints.values() + if c["unique"] and len(c["columns"]) == 1 + ] + table_description = connection.introspection.get_table_description( + cursor, table_name + ) + except Exception as e: + yield "# Unable to inspect table '%s'" % table_name + yield "# The error was: %s" % e + continue + + model_name = self.normalize_table_name(table_name) + yield "" + yield "" + yield "class %s(models.Model):" % model_name + known_models.append(model_name) + used_column_names = [] # Holds column names used in the table so far + column_to_field_name = {} # Maps column names to names of model fields + used_relations = set() # Holds foreign relations used in the table. + for row in table_description: + comment_notes = ( + [] + ) # Holds Field notes, to be displayed in a Python comment. + extra_params = {} # Holds Field parameters such as 'db_column'. + column_name = row.name + is_relation = column_name in relations + + att_name, params, notes = self.normalize_col_name( + column_name, used_column_names, is_relation + ) + extra_params.update(params) + comment_notes.extend(notes) + + used_column_names.append(att_name) + column_to_field_name[column_name] = att_name + + # Add primary_key and unique, if necessary. + if column_name == primary_key_column: + extra_params["primary_key"] = True + if len(primary_key_columns) > 1: + comment_notes.append( + "The composite primary key (%s) found, that is not " + "supported. The first column is selected." + % ", ".join(primary_key_columns) + ) + elif column_name in unique_columns: + extra_params["unique"] = True + + if is_relation: + ref_db_column, ref_db_table = relations[column_name] + if extra_params.pop("unique", False) or extra_params.get( + "primary_key" + ): + rel_type = "OneToOneField" + else: + rel_type = "ForeignKey" + ref_pk_column = ( + connection.introspection.get_primary_key_column( + cursor, ref_db_table + ) + ) + if ref_pk_column and ref_pk_column != ref_db_column: + extra_params["to_field"] = ref_db_column + rel_to = ( + "self" + if ref_db_table == table_name + else self.normalize_table_name(ref_db_table) + ) + if rel_to in known_models: + field_type = "%s(%s" % (rel_type, rel_to) + else: + field_type = "%s('%s'" % (rel_type, rel_to) + if rel_to in used_relations: + extra_params["related_name"] = "%s_%s_set" % ( + model_name.lower(), + att_name, + ) + used_relations.add(rel_to) + else: + # Calling `get_field_type` to get the field type string and any + # additional parameters and notes. 
+ field_type, field_params, field_notes = self.get_field_type( + connection, table_name, row + ) + extra_params.update(field_params) + comment_notes.extend(field_notes) + + field_type += "(" + + # Don't output 'id = meta.AutoField(primary_key=True)', because + # that's assumed if it doesn't exist. + if att_name == "id" and extra_params == {"primary_key": True}: + if field_type == "AutoField(": + continue + elif ( + field_type + == connection.features.introspected_field_types["AutoField"] + + "(" + ): + comment_notes.append("AutoField?") + + # Add 'null' and 'blank', if the 'null_ok' flag was present in the + # table description. + if row.null_ok: # If it's NULL... + extra_params["blank"] = True + extra_params["null"] = True + + field_desc = "%s = %s%s" % ( + att_name, + # Custom fields will have a dotted path + "" if "." in field_type else "models.", + field_type, + ) + if field_type.startswith(("ForeignKey(", "OneToOneField(")): + field_desc += ", models.DO_NOTHING" + + # Add comment. + if connection.features.supports_comments and row.comment: + extra_params["db_comment"] = row.comment + + if extra_params: + if not field_desc.endswith("("): + field_desc += ", " + field_desc += ", ".join( + "%s=%r" % (k, v) for k, v in extra_params.items() + ) + field_desc += ")" + if comment_notes: + field_desc += " # " + " ".join(comment_notes) + yield " %s" % field_desc + comment = None + if info := table_info.get(table_name): + is_view = info.type == "v" + is_partition = info.type == "p" + if connection.features.supports_comments: + comment = info.comment + else: + is_view = False + is_partition = False + yield from self.get_meta( + table_name, + constraints, + column_to_field_name, + is_view, + is_partition, + comment, + ) + + def normalize_col_name(self, col_name, used_column_names, is_relation): + """ + Modify the column name to make it Python-compatible as a field name + """ + field_params = {} + field_notes = [] + + new_name = col_name.lower() + if new_name != col_name: + field_notes.append("Field name made lowercase.") + + if is_relation: + if new_name.endswith("_id"): + new_name = new_name.removesuffix("_id") + else: + field_params["db_column"] = col_name + + new_name, num_repl = re.subn(r"\W", "_", new_name) + if num_repl > 0: + field_notes.append("Field renamed to remove unsuitable characters.") + + if new_name.find(LOOKUP_SEP) >= 0: + while new_name.find(LOOKUP_SEP) >= 0: + new_name = new_name.replace(LOOKUP_SEP, "_") + if col_name.lower().find(LOOKUP_SEP) >= 0: + # Only add the comment if the double underscore was in the original name + field_notes.append( + "Field renamed because it contained more than one '_' in a row." + ) + + if new_name.startswith("_"): + new_name = "field%s" % new_name + field_notes.append("Field renamed because it started with '_'.") + + if new_name.endswith("_"): + new_name = "%sfield" % new_name + field_notes.append("Field renamed because it ended with '_'.") + + if keyword.iskeyword(new_name): + new_name += "_field" + field_notes.append("Field renamed because it was a Python reserved word.") + + if new_name[0].isdigit(): + new_name = "number_%s" % new_name + field_notes.append( + "Field renamed because it wasn't a valid Python identifier." 
+ ) + + if new_name in used_column_names: + num = 0 + while "%s_%d" % (new_name, num) in used_column_names: + num += 1 + new_name = "%s_%d" % (new_name, num) + field_notes.append("Field renamed because of name conflict.") + + if col_name != new_name and field_notes: + field_params["db_column"] = col_name + + return new_name, field_params, field_notes + + def normalize_table_name(self, table_name): + """Translate the table name to a Python-compatible model name.""" + return re.sub(r"[^a-zA-Z0-9]", "", table_name.title()) + + def get_field_type(self, connection, table_name, row): + """ + Given the database connection, the table name, and the cursor row + description, this routine will return the given field type name, as + well as any additional keyword parameters and notes for the field. + """ + field_params = {} + field_notes = [] + + try: + field_type = connection.introspection.get_field_type(row.type_code, row) + except KeyError: + field_type = "TextField" + field_notes.append("This field type is a guess.") + + # Add max_length for all CharFields. + if field_type == "CharField" and row.display_size: + if (size := int(row.display_size)) and size > 0: + field_params["max_length"] = size + + if field_type in {"CharField", "TextField"} and row.collation: + field_params["db_collation"] = row.collation + + if field_type == "DecimalField": + if row.precision is None or row.scale is None: + field_notes.append( + "max_digits and decimal_places have been guessed, as this " + "database handles decimal fields as float" + ) + field_params["max_digits"] = ( + row.precision if row.precision is not None else 10 + ) + field_params["decimal_places"] = ( + row.scale if row.scale is not None else 5 + ) + else: + field_params["max_digits"] = row.precision + field_params["decimal_places"] = row.scale + + return field_type, field_params, field_notes + + def get_meta( + self, + table_name, + constraints, + column_to_field_name, + is_view, + is_partition, + comment, + ): + """ + Return a sequence comprising the lines of code necessary + to construct the inner Meta class for the model corresponding + to the given database table name. + """ + unique_together = [] + has_unsupported_constraint = False + for params in constraints.values(): + if params["unique"]: + columns = params["columns"] + if None in columns: + has_unsupported_constraint = True + columns = [ + x for x in columns if x is not None and x in column_to_field_name + ] + if len(columns) > 1: + unique_together.append( + str(tuple(column_to_field_name[c] for c in columns)) + ) + if is_view: + managed_comment = " # Created from a view. Don't remove." + elif is_partition: + managed_comment = " # Created from a partition. Don't remove." 
+ else: + managed_comment = "" + meta = [""] + if has_unsupported_constraint: + meta.append(" # A unique constraint could not be introspected.") + meta += [ + " class Meta:", + " managed = False%s" % managed_comment, + " db_table = %r" % table_name, + ] + if unique_together: + tup = "(" + ", ".join(unique_together) + ",)" + meta += [" unique_together = %s" % tup] + if comment: + meta += [f" db_table_comment = {comment!r}"] + return meta diff --git a/testbed/django__django/django/core/management/commands/loaddata.py b/testbed/django__django/django/core/management/commands/loaddata.py new file mode 100644 index 0000000000000000000000000000000000000000..bb46e8ae78c0014a7bcc9a89e3b914ae2551d02e --- /dev/null +++ b/testbed/django__django/django/core/management/commands/loaddata.py @@ -0,0 +1,432 @@ +import functools +import glob +import gzip +import os +import sys +import warnings +import zipfile +from itertools import product + +from django.apps import apps +from django.conf import settings +from django.core import serializers +from django.core.exceptions import ImproperlyConfigured +from django.core.management.base import BaseCommand, CommandError +from django.core.management.color import no_style +from django.core.management.utils import parse_apps_and_model_labels +from django.db import ( + DEFAULT_DB_ALIAS, + DatabaseError, + IntegrityError, + connections, + router, + transaction, +) +from django.utils.functional import cached_property + +try: + import bz2 + + has_bz2 = True +except ImportError: + has_bz2 = False + +try: + import lzma + + has_lzma = True +except ImportError: + has_lzma = False + +READ_STDIN = "-" + + +class Command(BaseCommand): + help = "Installs the named fixture(s) in the database." + missing_args_message = ( + "No database fixture specified. Please provide the path of at least " + "one fixture in the command line." + ) + + def add_arguments(self, parser): + parser.add_argument( + "args", metavar="fixture", nargs="+", help="Fixture labels." + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + "Nominates a specific database to load fixtures into. Defaults to the " + '"default" database.' + ), + ) + parser.add_argument( + "--app", + dest="app_label", + help="Only look for fixtures in the specified app.", + ) + parser.add_argument( + "--ignorenonexistent", + "-i", + action="store_true", + dest="ignore", + help="Ignores entries in the serialized data for fields that do not " + "currently exist on the model.", + ) + parser.add_argument( + "-e", + "--exclude", + action="append", + default=[], + help=( + "An app_label or app_label.ModelName to exclude. Can be used multiple " + "times." + ), + ) + parser.add_argument( + "--format", + help="Format of serialized data when reading from stdin.", + ) + + def handle(self, *fixture_labels, **options): + self.ignore = options["ignore"] + self.using = options["database"] + self.app_label = options["app_label"] + self.verbosity = options["verbosity"] + self.excluded_models, self.excluded_apps = parse_apps_and_model_labels( + options["exclude"] + ) + self.format = options["format"] + + with transaction.atomic(using=self.using): + self.loaddata(fixture_labels) + + # Close the DB connection -- unless we're still in a transaction. This + # is required as a workaround for an edge case in MySQL: if the same + # connection is used to create tables, load data, and query, the query + # can return incorrect results. See Django #7572, MySQL #37735. 
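+ # Editor's note: the autocommit guard below means the connection is only
+ # closed when loaddata is not already running inside an outer atomic()
+ # block (e.g. a TestCase-wrapped transaction).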
+ if transaction.get_autocommit(self.using): + connections[self.using].close() + + @cached_property + def compression_formats(self): + """A dict mapping format names to (open function, mode arg) tuples.""" + # Forcing binary mode may be revisited after dropping Python 2 support + # (see #22399). + compression_formats = { + None: (open, "rb"), + "gz": (gzip.GzipFile, "rb"), + "zip": (SingleZipReader, "r"), + "stdin": (lambda *args: sys.stdin, None), + } + if has_bz2: + compression_formats["bz2"] = (bz2.BZ2File, "r") + if has_lzma: + compression_formats["lzma"] = (lzma.LZMAFile, "r") + compression_formats["xz"] = (lzma.LZMAFile, "r") + return compression_formats + + def reset_sequences(self, connection, models): + """Reset database sequences for the given connection and models.""" + sequence_sql = connection.ops.sequence_reset_sql(no_style(), models) + if sequence_sql: + if self.verbosity >= 2: + self.stdout.write("Resetting sequences") + with connection.cursor() as cursor: + for line in sequence_sql: + cursor.execute(line) + + def loaddata(self, fixture_labels): + connection = connections[self.using] + + # Keep a count of the installed objects and fixtures + self.fixture_count = 0 + self.loaded_object_count = 0 + self.fixture_object_count = 0 + self.models = set() + + self.serialization_formats = serializers.get_public_serializer_formats() + + # Django's test suite repeatedly tries to load initial_data fixtures + # from apps that don't have any fixtures. Because disabling constraint + # checks can be expensive on some database (especially MSSQL), bail + # out early if no fixtures are found. + for fixture_label in fixture_labels: + if self.find_fixtures(fixture_label): + break + else: + return + + self.objs_with_deferred_fields = [] + with connection.constraint_checks_disabled(): + for fixture_label in fixture_labels: + self.load_label(fixture_label) + for obj in self.objs_with_deferred_fields: + obj.save_deferred_fields(using=self.using) + + # Since we disabled constraint checks, we must manually check for + # any invalid keys that might have been added + table_names = [model._meta.db_table for model in self.models] + try: + connection.check_constraints(table_names=table_names) + except Exception as e: + e.args = ("Problem installing fixtures: %s" % e,) + raise + + # If we found even one object in a fixture, we need to reset the + # database sequences. + if self.loaded_object_count > 0: + self.reset_sequences(connection, self.models) + + if self.verbosity >= 1: + if self.fixture_object_count == self.loaded_object_count: + self.stdout.write( + "Installed %d object(s) from %d fixture(s)" + % (self.loaded_object_count, self.fixture_count) + ) + else: + self.stdout.write( + "Installed %d object(s) (of %d) from %d fixture(s)" + % ( + self.loaded_object_count, + self.fixture_object_count, + self.fixture_count, + ) + ) + + def save_obj(self, obj): + """Save an object if permitted.""" + if ( + obj.object._meta.app_config in self.excluded_apps + or type(obj.object) in self.excluded_models + ): + return False + saved = False + if router.allow_migrate_model(self.using, obj.object.__class__): + saved = True + self.models.add(obj.object.__class__) + try: + obj.save(using=self.using) + # psycopg raises ValueError if data contains NUL chars. 
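+ # Editor's note: the wrapper below renders messages like, e.g.,
+ # "Could not load auth.User(pk=1): <backend error text>" (illustrative).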
+ except (DatabaseError, IntegrityError, ValueError) as e: + e.args = ( + "Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s" + % { + "object_label": obj.object._meta.label, + "pk": obj.object.pk, + "error_msg": e, + }, + ) + raise + if obj.deferred_fields: + self.objs_with_deferred_fields.append(obj) + return saved + + def load_label(self, fixture_label): + """Load fixtures files for a given label.""" + show_progress = self.verbosity >= 3 + for fixture_file, fixture_dir, fixture_name in self.find_fixtures( + fixture_label + ): + _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file)) + open_method, mode = self.compression_formats[cmp_fmt] + fixture = open_method(fixture_file, mode) + self.fixture_count += 1 + objects_in_fixture = 0 + loaded_objects_in_fixture = 0 + if self.verbosity >= 2: + self.stdout.write( + "Installing %s fixture '%s' from %s." + % (ser_fmt, fixture_name, humanize(fixture_dir)) + ) + try: + objects = serializers.deserialize( + ser_fmt, + fixture, + using=self.using, + ignorenonexistent=self.ignore, + handle_forward_references=True, + ) + + for obj in objects: + objects_in_fixture += 1 + if self.save_obj(obj): + loaded_objects_in_fixture += 1 + if show_progress: + self.stdout.write( + "\rProcessed %i object(s)." % loaded_objects_in_fixture, + ending="", + ) + except Exception as e: + if not isinstance(e, CommandError): + e.args = ( + "Problem installing fixture '%s': %s" % (fixture_file, e), + ) + raise + finally: + fixture.close() + + if objects_in_fixture and show_progress: + self.stdout.write() # Add a newline after progress indicator. + self.loaded_object_count += loaded_objects_in_fixture + self.fixture_object_count += objects_in_fixture + # Warn if the fixture we loaded contains 0 objects. + if objects_in_fixture == 0: + warnings.warn( + "No fixture data found for '%s'. (File format may be " + "invalid.)" % fixture_name, + RuntimeWarning, + ) + + def get_fixture_name_and_dirs(self, fixture_name): + dirname, basename = os.path.split(fixture_name) + if os.path.isabs(fixture_name): + fixture_dirs = [dirname] + else: + fixture_dirs = self.fixture_dirs + if os.path.sep in os.path.normpath(fixture_name): + fixture_dirs = [os.path.join(dir_, dirname) for dir_ in fixture_dirs] + return basename, fixture_dirs + + def get_targets(self, fixture_name, ser_fmt, cmp_fmt): + databases = [self.using, None] + cmp_fmts = self.compression_formats if cmp_fmt is None else [cmp_fmt] + ser_fmts = self.serialization_formats if ser_fmt is None else [ser_fmt] + return { + "%s.%s" + % ( + fixture_name, + ".".join([ext for ext in combo if ext]), + ) + for combo in product(databases, ser_fmts, cmp_fmts) + } + + def find_fixture_files_in_dir(self, fixture_dir, fixture_name, targets): + fixture_files_in_dir = [] + path = os.path.join(fixture_dir, fixture_name) + for candidate in glob.iglob(glob.escape(path) + "*"): + if os.path.basename(candidate) in targets: + # Save the fixture_dir and fixture_name for future error + # messages. + fixture_files_in_dir.append((candidate, fixture_dir, fixture_name)) + return fixture_files_in_dir + + @functools.cache + def find_fixtures(self, fixture_label): + """Find fixture files for a given label.""" + if fixture_label == READ_STDIN: + return [(READ_STDIN, None, READ_STDIN)] + + fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label) + if self.verbosity >= 2: + self.stdout.write("Loading '%s' fixtures..." 
% fixture_name) + + fixture_name, fixture_dirs = self.get_fixture_name_and_dirs(fixture_name) + targets = self.get_targets(fixture_name, ser_fmt, cmp_fmt) + fixture_files = [] + for fixture_dir in fixture_dirs: + if self.verbosity >= 2: + self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir)) + fixture_files_in_dir = self.find_fixture_files_in_dir( + fixture_dir, + fixture_name, + targets, + ) + if self.verbosity >= 2 and not fixture_files_in_dir: + self.stdout.write( + "No fixture '%s' in %s." % (fixture_name, humanize(fixture_dir)) + ) + + # Check kept for backwards-compatibility; it isn't clear why + # duplicates are only allowed in different directories. + if len(fixture_files_in_dir) > 1: + raise CommandError( + "Multiple fixtures named '%s' in %s. Aborting." + % (fixture_name, humanize(fixture_dir)) + ) + fixture_files.extend(fixture_files_in_dir) + + if not fixture_files: + raise CommandError("No fixture named '%s' found." % fixture_name) + + return fixture_files + + @cached_property + def fixture_dirs(self): + """ + Return a list of fixture directories. + + The list contains the 'fixtures' subdirectory of each installed + application, if it exists, the directories in FIXTURE_DIRS, and the + current directory. + """ + dirs = [] + fixture_dirs = settings.FIXTURE_DIRS + if len(fixture_dirs) != len(set(fixture_dirs)): + raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.") + for app_config in apps.get_app_configs(): + app_label = app_config.label + app_dir = os.path.join(app_config.path, "fixtures") + if app_dir in [str(d) for d in fixture_dirs]: + raise ImproperlyConfigured( + "'%s' is a default fixture directory for the '%s' app " + "and cannot be listed in settings.FIXTURE_DIRS." + % (app_dir, app_label) + ) + + if self.app_label and app_label != self.app_label: + continue + if os.path.isdir(app_dir): + dirs.append(app_dir) + dirs.extend(fixture_dirs) + dirs.append("") + return [os.path.realpath(d) for d in dirs] + + def parse_name(self, fixture_name): + """ + Split fixture name in name, serialization format, compression format. + """ + if fixture_name == READ_STDIN: + if not self.format: + raise CommandError( + "--format must be specified when reading from stdin." + ) + return READ_STDIN, self.format, "stdin" + + parts = fixture_name.rsplit(".", 2) + + if len(parts) > 1 and parts[-1] in self.compression_formats: + cmp_fmt = parts[-1] + parts = parts[:-1] + else: + cmp_fmt = None + + if len(parts) > 1: + if parts[-1] in self.serialization_formats: + ser_fmt = parts[-1] + parts = parts[:-1] + else: + raise CommandError( + "Problem installing fixture '%s': %s is not a known " + "serialization format." 
% (".".join(parts[:-1]), parts[-1]) + ) + else: + ser_fmt = None + + name = ".".join(parts) + + return name, ser_fmt, cmp_fmt + + +class SingleZipReader(zipfile.ZipFile): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if len(self.namelist()) != 1: + raise ValueError("Zip-compressed fixtures must contain one file.") + + def read(self): + return zipfile.ZipFile.read(self, self.namelist()[0]) + + +def humanize(dirname): + return "'%s'" % dirname if dirname else "absolute path" diff --git a/testbed/django__django/django/core/management/commands/makemessages.py b/testbed/django__django/django/core/management/commands/makemessages.py new file mode 100644 index 0000000000000000000000000000000000000000..1d4947fb308c776ae0d33788963b14fbc8df2ba1 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/makemessages.py @@ -0,0 +1,783 @@ +import glob +import os +import re +import sys +from functools import total_ordering +from itertools import dropwhile +from pathlib import Path + +import django +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.core.files.temp import NamedTemporaryFile +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import ( + find_command, + handle_extensions, + is_ignored_path, + popen_wrapper, +) +from django.utils.encoding import DEFAULT_LOCALE_ENCODING +from django.utils.functional import cached_property +from django.utils.jslex import prepare_js_for_gettext +from django.utils.regex_helper import _lazy_re_compile +from django.utils.text import get_text_list +from django.utils.translation import templatize + +plural_forms_re = _lazy_re_compile( + r'^(?P"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL +) +STATUS_OK = 0 +NO_LOCALE_DIR = object() + + +def check_programs(*programs): + for program in programs: + if find_command(program) is None: + raise CommandError( + "Can't find %s. Make sure you have GNU gettext tools 0.15 or " + "newer installed." % program + ) + + +def is_valid_locale(locale): + return re.match(r"^[a-z]+$", locale) or re.match(r"^[a-z]+_[A-Z].*$", locale) + + +@total_ordering +class TranslatableFile: + def __init__(self, dirpath, file_name, locale_dir): + self.file = file_name + self.dirpath = dirpath + self.locale_dir = locale_dir + + def __repr__(self): + return "<%s: %s>" % ( + self.__class__.__name__, + os.sep.join([self.dirpath, self.file]), + ) + + def __eq__(self, other): + return self.path == other.path + + def __lt__(self, other): + return self.path < other.path + + @property + def path(self): + return os.path.join(self.dirpath, self.file) + + +class BuildFile: + """ + Represent the state of a translatable file during the build process. + """ + + def __init__(self, command, domain, translatable): + self.command = command + self.domain = domain + self.translatable = translatable + + @cached_property + def is_templatized(self): + if self.domain == "djangojs": + return self.command.gettext_version < (0, 18, 3) + elif self.domain == "django": + file_ext = os.path.splitext(self.translatable.file)[1] + return file_ext != ".py" + return False + + @cached_property + def path(self): + return self.translatable.path + + @cached_property + def work_path(self): + """ + Path to a file which is being fed into GNU gettext pipeline. This may + be either a translatable or its preprocessed version. 
+ """ + if not self.is_templatized: + return self.path + extension = { + "djangojs": "c", + "django": "py", + }.get(self.domain) + filename = "%s.%s" % (self.translatable.file, extension) + return os.path.join(self.translatable.dirpath, filename) + + def preprocess(self): + """ + Preprocess (if necessary) a translatable file before passing it to + xgettext GNU gettext utility. + """ + if not self.is_templatized: + return + + with open(self.path, encoding="utf-8") as fp: + src_data = fp.read() + + if self.domain == "djangojs": + content = prepare_js_for_gettext(src_data) + elif self.domain == "django": + content = templatize(src_data, origin=self.path[2:]) + + with open(self.work_path, "w", encoding="utf-8") as fp: + fp.write(content) + + def postprocess_messages(self, msgs): + """ + Postprocess messages generated by xgettext GNU gettext utility. + + Transform paths as if these messages were generated from original + translatable files rather than from preprocessed versions. + """ + if not self.is_templatized: + return msgs + + # Remove '.py' suffix + if os.name == "nt": + # Preserve '.\' prefix on Windows to respect gettext behavior + old_path = self.work_path + new_path = self.path + else: + old_path = self.work_path[2:] + new_path = self.path[2:] + + return re.sub( + r"^(#: .*)(" + re.escape(old_path) + r")", + lambda match: match[0].replace(old_path, new_path), + msgs, + flags=re.MULTILINE, + ) + + def cleanup(self): + """ + Remove a preprocessed copy of a translatable file (if any). + """ + if self.is_templatized: + # This check is needed for the case of a symlinked file and its + # source being processed inside a single group (locale dir); + # removing either of those two removes both. + if os.path.exists(self.work_path): + os.unlink(self.work_path) + + +def normalize_eols(raw_contents): + """ + Take a block of raw text that will be passed through str.splitlines() to + get universal newlines treatment. + + Return the resulting block of text with normalized `\n` EOL sequences ready + to be written to disk using current platform's native EOLs. + """ + lines_list = raw_contents.splitlines() + # Ensure last line has its EOL + if lines_list and lines_list[-1]: + lines_list.append("") + return "\n".join(lines_list) + + +def write_pot_file(potfile, msgs): + """ + Write the `potfile` with the `msgs` contents, making sure its format is + valid. + """ + pot_lines = msgs.splitlines() + if os.path.exists(potfile): + # Strip the header + lines = dropwhile(len, pot_lines) + else: + lines = [] + found, header_read = False, False + for line in pot_lines: + if not found and not header_read: + if "charset=CHARSET" in line: + found = True + line = line.replace("charset=CHARSET", "charset=UTF-8") + if not line and not found: + header_read = True + lines.append(line) + msgs = "\n".join(lines) + # Force newlines of POT files to '\n' to work around + # https://savannah.gnu.org/bugs/index.php?52395 + with open(potfile, "a", encoding="utf-8", newline="\n") as fp: + fp.write(msgs) + + +class Command(BaseCommand): + help = ( + "Runs over the entire source tree of the current directory and pulls out all " + "strings marked for translation. It creates (or updates) a message file in the " + "conf/locale (in the django tree) or locale (for projects and applications) " + "directory.\n\nYou must run this command with one of either the --locale, " + "--exclude, or --all options." 
+ ) + + translatable_file_class = TranslatableFile + build_file_class = BuildFile + + requires_system_checks = [] + + msgmerge_options = ["-q", "--backup=none", "--previous", "--update"] + msguniq_options = ["--to-code=utf-8"] + msgattrib_options = ["--no-obsolete"] + xgettext_options = ["--from-code=UTF-8", "--add-comments=Translators"] + + def add_arguments(self, parser): + parser.add_argument( + "--locale", + "-l", + default=[], + action="append", + help=( + "Creates or updates the message files for the given locale(s) (e.g. " + "pt_BR). Can be used multiple times." + ), + ) + parser.add_argument( + "--exclude", + "-x", + default=[], + action="append", + help="Locales to exclude. Default is none. Can be used multiple times.", + ) + parser.add_argument( + "--domain", + "-d", + default="django", + help='The domain of the message files (default: "django").', + ) + parser.add_argument( + "--all", + "-a", + action="store_true", + help="Updates the message files for all existing locales.", + ) + parser.add_argument( + "--extension", + "-e", + dest="extensions", + action="append", + help='The file extension(s) to examine (default: "html,txt,py", or "js" ' + 'if the domain is "djangojs"). Separate multiple extensions with ' + "commas, or use -e multiple times.", + ) + parser.add_argument( + "--symlinks", + "-s", + action="store_true", + help="Follows symlinks to directories when examining source code " + "and templates for translation strings.", + ) + parser.add_argument( + "--ignore", + "-i", + action="append", + dest="ignore_patterns", + default=[], + metavar="PATTERN", + help="Ignore files or directories matching this glob-style pattern. " + "Use multiple times to ignore more.", + ) + parser.add_argument( + "--no-default-ignore", + action="store_false", + dest="use_default_ignore_patterns", + help=( + "Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and " + "'*.pyc'." + ), + ) + parser.add_argument( + "--no-wrap", + action="store_true", + help="Don't break long message lines into several lines.", + ) + parser.add_argument( + "--no-location", + action="store_true", + help="Don't write '#: filename:line' lines.", + ) + parser.add_argument( + "--add-location", + choices=("full", "file", "never"), + const="full", + nargs="?", + help=( + "Controls '#: filename:line' lines. If the option is 'full' " + "(the default if not given), the lines include both file name " + "and line number. If it's 'file', the line number is omitted. If " + "it's 'never', the lines are suppressed (same as --no-location). " + "--add-location requires gettext 0.19 or newer." + ), + ) + parser.add_argument( + "--no-obsolete", + action="store_true", + help="Remove obsolete message strings.", + ) + parser.add_argument( + "--keep-pot", + action="store_true", + help="Keep .pot file after making messages. 
Useful when debugging.", + ) + + def handle(self, *args, **options): + locale = options["locale"] + exclude = options["exclude"] + self.domain = options["domain"] + self.verbosity = options["verbosity"] + process_all = options["all"] + extensions = options["extensions"] + self.symlinks = options["symlinks"] + + ignore_patterns = options["ignore_patterns"] + if options["use_default_ignore_patterns"]: + ignore_patterns += ["CVS", ".*", "*~", "*.pyc"] + self.ignore_patterns = list(set(ignore_patterns)) + + # Avoid messing with mutable class variables + if options["no_wrap"]: + self.msgmerge_options = self.msgmerge_options[:] + ["--no-wrap"] + self.msguniq_options = self.msguniq_options[:] + ["--no-wrap"] + self.msgattrib_options = self.msgattrib_options[:] + ["--no-wrap"] + self.xgettext_options = self.xgettext_options[:] + ["--no-wrap"] + if options["no_location"]: + self.msgmerge_options = self.msgmerge_options[:] + ["--no-location"] + self.msguniq_options = self.msguniq_options[:] + ["--no-location"] + self.msgattrib_options = self.msgattrib_options[:] + ["--no-location"] + self.xgettext_options = self.xgettext_options[:] + ["--no-location"] + if options["add_location"]: + if self.gettext_version < (0, 19): + raise CommandError( + "The --add-location option requires gettext 0.19 or later. " + "You have %s." % ".".join(str(x) for x in self.gettext_version) + ) + arg_add_location = "--add-location=%s" % options["add_location"] + self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location] + self.msguniq_options = self.msguniq_options[:] + [arg_add_location] + self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location] + self.xgettext_options = self.xgettext_options[:] + [arg_add_location] + + self.no_obsolete = options["no_obsolete"] + self.keep_pot = options["keep_pot"] + + if self.domain not in ("django", "djangojs"): + raise CommandError( + "currently makemessages only supports domains " + "'django' and 'djangojs'" + ) + if self.domain == "djangojs": + exts = extensions or ["js"] + else: + exts = extensions or ["html", "txt", "py"] + self.extensions = handle_extensions(exts) + + if (not locale and not exclude and not process_all) or self.domain is None: + raise CommandError( + "Type '%s help %s' for usage information." 
% (os.path.basename(sys.argv[0]), sys.argv[1])
+            )
+
+        if self.verbosity > 1:
+            self.stdout.write(
+                "examining files with the extensions: %s"
+                % get_text_list(list(self.extensions), "and")
+            )
+
+        self.invoked_for_django = False
+        self.locale_paths = []
+        self.default_locale_path = None
+        if os.path.isdir(os.path.join("conf", "locale")):
+            self.locale_paths = [os.path.abspath(os.path.join("conf", "locale"))]
+            self.default_locale_path = self.locale_paths[0]
+            self.invoked_for_django = True
+        else:
+            if self.settings_available:
+                self.locale_paths.extend(settings.LOCALE_PATHS)
+            # Allow to run makemessages inside an app dir
+            if os.path.isdir("locale"):
+                self.locale_paths.append(os.path.abspath("locale"))
+            if self.locale_paths:
+                self.default_locale_path = self.locale_paths[0]
+                os.makedirs(self.default_locale_path, exist_ok=True)
+
+        # Build locale list
+        looks_like_locale = re.compile(r"[a-z]{2}")
+        locale_dirs = filter(
+            os.path.isdir, glob.glob("%s/*" % self.default_locale_path)
+        )
+        all_locales = [
+            lang_code
+            for lang_code in map(os.path.basename, locale_dirs)
+            if looks_like_locale.match(lang_code)
+        ]
+
+        # Account for excluded locales
+        if process_all:
+            locales = all_locales
+        else:
+            locales = locale or all_locales
+            locales = set(locales).difference(exclude)
+
+        if locales:
+            check_programs("msguniq", "msgmerge", "msgattrib")
+
+        check_programs("xgettext")
+
+        try:
+            potfiles = self.build_potfiles()
+
+            # Build po files for each selected locale
+            for locale in locales:
+                if not is_valid_locale(locale):
+                    # Try to guess what valid locale it could be
+                    # Valid examples are: en_GB, shi_Latn_MA and nl_NL-x-informal
+
+                    # Search for characters followed by a non character (i.e. separator)
+                    match = re.match(
+                        r"^(?P<language>[a-zA-Z]+)"
+                        r"(?P<separator>[^a-zA-Z])"
+                        r"(?P<territory>.+)$",
+                        locale,
+                    )
+                    if match:
+                        locale_parts = match.groupdict()
+                        language = locale_parts["language"].lower()
+                        territory = (
+                            locale_parts["territory"][:2].upper()
+                            + locale_parts["territory"][2:]
+                        )
+                        proposed_locale = f"{language}_{territory}"
+                    else:
+                        # It could be a language in uppercase
+                        proposed_locale = locale.lower()
+
+                    # Recheck if the proposed locale is valid
+                    if is_valid_locale(proposed_locale):
+                        self.stdout.write(
+                            "invalid locale %s, did you mean %s?"
+                            % (
+                                locale,
+                                proposed_locale,
+                            ),
+                        )
+                    else:
+                        self.stdout.write("invalid locale %s" % locale)
+
+                    continue
+                if self.verbosity > 0:
+                    self.stdout.write("processing locale %s" % locale)
+                for potfile in potfiles:
+                    self.write_po_file(potfile, locale)
+        finally:
+            if not self.keep_pot:
+                self.remove_potfiles()
+
+    @cached_property
+    def gettext_version(self):
+        # Gettext tools will output system-encoded bytestrings instead of UTF-8,
+        # when looking up the version. It's especially a problem on Windows.
+        out, err, status = popen_wrapper(
+            ["xgettext", "--version"],
+            stdout_encoding=DEFAULT_LOCALE_ENCODING,
+        )
+        m = re.search(r"(\d+)\.(\d+)\.?(\d+)?", out)
+        if m:
+            return tuple(int(d) for d in m.groups() if d is not None)
+        else:
+            raise CommandError("Unable to get gettext version. Is it installed?")
+
+    @cached_property
+    def settings_available(self):
+        try:
+            settings.LOCALE_PATHS
+        except ImproperlyConfigured:
+            if self.verbosity > 1:
+                self.stderr.write("Running without configured settings.")
+            return False
+        return True
+
+    def build_potfiles(self):
+        """
+        Build pot files and apply msguniq to them.
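+
+        msguniq merges duplicate msgid entries that different source files
+        contributed to the same .pot file.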
+ """ + file_list = self.find_files(".") + self.remove_potfiles() + self.process_files(file_list) + potfiles = [] + for path in self.locale_paths: + potfile = os.path.join(path, "%s.pot" % self.domain) + if not os.path.exists(potfile): + continue + args = ["msguniq"] + self.msguniq_options + [potfile] + msgs, errors, status = popen_wrapper(args) + if errors: + if status != STATUS_OK: + raise CommandError( + "errors happened while running msguniq\n%s" % errors + ) + elif self.verbosity > 0: + self.stdout.write(errors) + msgs = normalize_eols(msgs) + with open(potfile, "w", encoding="utf-8") as fp: + fp.write(msgs) + potfiles.append(potfile) + return potfiles + + def remove_potfiles(self): + for path in self.locale_paths: + pot_path = os.path.join(path, "%s.pot" % self.domain) + if os.path.exists(pot_path): + os.unlink(pot_path) + + def find_files(self, root): + """ + Get all files in the given root. Also check that there is a matching + locale dir for each file. + """ + all_files = [] + ignored_roots = [] + if self.settings_available: + ignored_roots = [ + os.path.normpath(p) + for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) + if p + ] + for dirpath, dirnames, filenames in os.walk( + root, topdown=True, followlinks=self.symlinks + ): + for dirname in dirnames[:]: + if ( + is_ignored_path( + os.path.normpath(os.path.join(dirpath, dirname)), + self.ignore_patterns, + ) + or os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots + ): + dirnames.remove(dirname) + if self.verbosity > 1: + self.stdout.write("ignoring directory %s" % dirname) + elif dirname == "locale": + dirnames.remove(dirname) + self.locale_paths.insert( + 0, os.path.join(os.path.abspath(dirpath), dirname) + ) + for filename in filenames: + file_path = os.path.normpath(os.path.join(dirpath, filename)) + file_ext = os.path.splitext(filename)[1] + if file_ext not in self.extensions or is_ignored_path( + file_path, self.ignore_patterns + ): + if self.verbosity > 1: + self.stdout.write( + "ignoring file %s in %s" % (filename, dirpath) + ) + else: + locale_dir = None + for path in self.locale_paths: + if os.path.abspath(dirpath).startswith(os.path.dirname(path)): + locale_dir = path + break + locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR + all_files.append( + self.translatable_file_class(dirpath, filename, locale_dir) + ) + return sorted(all_files) + + def process_files(self, file_list): + """ + Group translatable files by locale directory and run pot file build + process for each group. + """ + file_groups = {} + for translatable in file_list: + file_group = file_groups.setdefault(translatable.locale_dir, []) + file_group.append(translatable) + for locale_dir, files in file_groups.items(): + self.process_locale_dir(locale_dir, files) + + def process_locale_dir(self, locale_dir, files): + """ + Extract translatable literals from the specified files, creating or + updating the POT file for a given locale directory. + + Use the xgettext GNU gettext utility. 
+ """ + build_files = [] + for translatable in files: + if self.verbosity > 1: + self.stdout.write( + "processing file %s in %s" + % (translatable.file, translatable.dirpath) + ) + if self.domain not in ("djangojs", "django"): + continue + build_file = self.build_file_class(self, self.domain, translatable) + try: + build_file.preprocess() + except UnicodeDecodeError as e: + self.stdout.write( + "UnicodeDecodeError: skipped file %s in %s (reason: %s)" + % ( + translatable.file, + translatable.dirpath, + e, + ) + ) + continue + except BaseException: + # Cleanup before exit. + for build_file in build_files: + build_file.cleanup() + raise + build_files.append(build_file) + + if self.domain == "djangojs": + is_templatized = build_file.is_templatized + args = [ + "xgettext", + "-d", + self.domain, + "--language=%s" % ("C" if is_templatized else "JavaScript",), + "--keyword=gettext_noop", + "--keyword=gettext_lazy", + "--keyword=ngettext_lazy:1,2", + "--keyword=pgettext:1c,2", + "--keyword=npgettext:1c,2,3", + "--output=-", + ] + elif self.domain == "django": + args = [ + "xgettext", + "-d", + self.domain, + "--language=Python", + "--keyword=gettext_noop", + "--keyword=gettext_lazy", + "--keyword=ngettext_lazy:1,2", + "--keyword=pgettext:1c,2", + "--keyword=npgettext:1c,2,3", + "--keyword=pgettext_lazy:1c,2", + "--keyword=npgettext_lazy:1c,2,3", + "--output=-", + ] + else: + return + + input_files = [bf.work_path for bf in build_files] + with NamedTemporaryFile(mode="w+") as input_files_list: + input_files_list.write("\n".join(input_files)) + input_files_list.flush() + args.extend(["--files-from", input_files_list.name]) + args.extend(self.xgettext_options) + msgs, errors, status = popen_wrapper(args) + + if errors: + if status != STATUS_OK: + for build_file in build_files: + build_file.cleanup() + raise CommandError( + "errors happened while running xgettext on %s\n%s" + % ("\n".join(input_files), errors) + ) + elif self.verbosity > 0: + # Print warnings + self.stdout.write(errors) + + if msgs: + if locale_dir is NO_LOCALE_DIR: + for build_file in build_files: + build_file.cleanup() + file_path = os.path.normpath(build_files[0].path) + raise CommandError( + "Unable to find a locale path to store translations for " + "file %s. Make sure the 'locale' directory exists in an " + "app or LOCALE_PATHS setting is set." % file_path + ) + for build_file in build_files: + msgs = build_file.postprocess_messages(msgs) + potfile = os.path.join(locale_dir, "%s.pot" % self.domain) + write_pot_file(potfile, msgs) + + for build_file in build_files: + build_file.cleanup() + + def write_po_file(self, potfile, locale): + """ + Create or update the PO file for self.domain and `locale`. + Use contents of the existing `potfile`. + + Use msgmerge and msgattrib GNU gettext utilities. + """ + basedir = os.path.join(os.path.dirname(potfile), locale, "LC_MESSAGES") + os.makedirs(basedir, exist_ok=True) + pofile = os.path.join(basedir, "%s.po" % self.domain) + + if os.path.exists(pofile): + args = ["msgmerge"] + self.msgmerge_options + [pofile, potfile] + _, errors, status = popen_wrapper(args) + if errors: + if status != STATUS_OK: + raise CommandError( + "errors happened while running msgmerge\n%s" % errors + ) + elif self.verbosity > 0: + self.stdout.write(errors) + msgs = Path(pofile).read_text(encoding="utf-8") + else: + with open(potfile, encoding="utf-8") as fp: + msgs = fp.read() + if not self.invoked_for_django: + msgs = self.copy_plural_forms(msgs, locale) + msgs = normalize_eols(msgs) + msgs = msgs.replace( + "#. 
#-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "" + ) + with open(pofile, "w", encoding="utf-8") as fp: + fp.write(msgs) + + if self.no_obsolete: + args = ["msgattrib"] + self.msgattrib_options + ["-o", pofile, pofile] + msgs, errors, status = popen_wrapper(args) + if errors: + if status != STATUS_OK: + raise CommandError( + "errors happened while running msgattrib\n%s" % errors + ) + elif self.verbosity > 0: + self.stdout.write(errors) + + def copy_plural_forms(self, msgs, locale): + """ + Copy plural forms header contents from a Django catalog of locale to + the msgs string, inserting it at the right place. msgs should be the + contents of a newly created .po file. + """ + django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__))) + if self.domain == "djangojs": + domains = ("djangojs", "django") + else: + domains = ("django",) + for domain in domains: + django_po = os.path.join( + django_dir, "conf", "locale", locale, "LC_MESSAGES", "%s.po" % domain + ) + if os.path.exists(django_po): + with open(django_po, encoding="utf-8") as fp: + m = plural_forms_re.search(fp.read()) + if m: + plural_form_line = m["value"] + if self.verbosity > 1: + self.stdout.write("copying plural forms: %s" % plural_form_line) + lines = [] + found = False + for line in msgs.splitlines(): + if not found and (not line or plural_forms_re.search(line)): + line = plural_form_line + found = True + lines.append(line) + msgs = "\n".join(lines) + break + return msgs diff --git a/testbed/django__django/django/core/management/commands/makemigrations.py b/testbed/django__django/django/core/management/commands/makemigrations.py new file mode 100644 index 0000000000000000000000000000000000000000..35661d49972d611f75e4acb4871c70fcbf6fc504 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/makemigrations.py @@ -0,0 +1,513 @@ +import os +import sys +import warnings +from itertools import takewhile + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError, no_translations +from django.core.management.utils import run_formatters +from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router +from django.db.migrations import Migration +from django.db.migrations.autodetector import MigrationAutodetector +from django.db.migrations.loader import MigrationLoader +from django.db.migrations.migration import SwappableTuple +from django.db.migrations.optimizer import MigrationOptimizer +from django.db.migrations.questioner import ( + InteractiveMigrationQuestioner, + MigrationQuestioner, + NonInteractiveMigrationQuestioner, +) +from django.db.migrations.state import ProjectState +from django.db.migrations.utils import get_migration_name_timestamp +from django.db.migrations.writer import MigrationWriter + + +class Command(BaseCommand): + help = "Creates new migration(s) for apps." 
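+
+    # Illustrative invocation: `manage.py makemigrations myapp --name add_slug`
+    # writes myapp/migrations/000N_add_slug.py when model changes are pending.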
+ + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="app_label", + nargs="*", + help="Specify the app label(s) to create migrations for.", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Just show what migrations would be made; don't actually write them.", + ) + parser.add_argument( + "--merge", + action="store_true", + help="Enable fixing of migration conflicts.", + ) + parser.add_argument( + "--empty", + action="store_true", + help="Create an empty migration.", + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "-n", + "--name", + help="Use this name for migration file(s).", + ) + parser.add_argument( + "--no-header", + action="store_false", + dest="include_header", + help="Do not add header comments to new migration file(s).", + ) + parser.add_argument( + "--check", + action="store_true", + dest="check_changes", + help=( + "Exit with a non-zero status if model changes are missing migrations " + "and don't actually write them." + ), + ) + parser.add_argument( + "--scriptable", + action="store_true", + dest="scriptable", + help=( + "Divert log output and input prompts to stderr, writing only " + "paths of generated migration files to stdout." + ), + ) + parser.add_argument( + "--update", + action="store_true", + dest="update", + help=( + "Merge model changes into the latest migration and optimize the " + "resulting operations." + ), + ) + + @property + def log_output(self): + return self.stderr if self.scriptable else self.stdout + + def log(self, msg): + self.log_output.write(msg) + + @no_translations + def handle(self, *app_labels, **options): + self.written_files = [] + self.verbosity = options["verbosity"] + self.interactive = options["interactive"] + self.dry_run = options["dry_run"] + self.merge = options["merge"] + self.empty = options["empty"] + self.migration_name = options["name"] + if self.migration_name and not self.migration_name.isidentifier(): + raise CommandError("The migration name must be a valid Python identifier.") + self.include_header = options["include_header"] + check_changes = options["check_changes"] + self.scriptable = options["scriptable"] + self.update = options["update"] + # If logs and prompts are diverted to stderr, remove the ERROR style. + if self.scriptable: + self.stderr.style_func = None + + # Make sure the app they asked for exists + app_labels = set(app_labels) + has_bad_labels = False + for app_label in app_labels: + try: + apps.get_app_config(app_label) + except LookupError as err: + self.stderr.write(str(err)) + has_bad_labels = True + if has_bad_labels: + sys.exit(2) + + # Load the current graph state. Pass in None for the connection so + # the loader doesn't try to resolve replaced migrations from DB. + loader = MigrationLoader(None, ignore_no_migrations=True) + + # Raise an error if any migrations are applied before their dependencies. + consistency_check_labels = {config.label for config in apps.get_app_configs()} + # Non-default databases are only checked if database routers used. + aliases_to_check = ( + connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS] + ) + for alias in sorted(aliases_to_check): + connection = connections[alias] + if connection.settings_dict["ENGINE"] != "django.db.backends.dummy" and any( + # At least one model must be migrated to the database. 
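+                # allow_migrate() is the standard database-router hook; any
+                # configured router may veto migrating a model to this
+                # connection.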
+ router.allow_migrate( + connection.alias, app_label, model_name=model._meta.object_name + ) + for app_label in consistency_check_labels + for model in apps.get_app_config(app_label).get_models() + ): + try: + loader.check_consistent_history(connection) + except OperationalError as error: + warnings.warn( + "Got an error checking a consistent migration history " + "performed for database connection '%s': %s" % (alias, error), + RuntimeWarning, + ) + # Before anything else, see if there's conflicting apps and drop out + # hard if there are any and they don't want to merge + conflicts = loader.detect_conflicts() + + # If app_labels is specified, filter out conflicting migrations for + # unspecified apps. + if app_labels: + conflicts = { + app_label: conflict + for app_label, conflict in conflicts.items() + if app_label in app_labels + } + + if conflicts and not self.merge: + name_str = "; ".join( + "%s in %s" % (", ".join(names), app) for app, names in conflicts.items() + ) + raise CommandError( + "Conflicting migrations detected; multiple leaf nodes in the " + "migration graph: (%s).\nTo fix them run " + "'python manage.py makemigrations --merge'" % name_str + ) + + # If they want to merge and there's nothing to merge, then politely exit + if self.merge and not conflicts: + self.log("No conflicts detected to merge.") + return + + # If they want to merge and there is something to merge, then + # divert into the merge code + if self.merge and conflicts: + return self.handle_merge(loader, conflicts) + + if self.interactive: + questioner = InteractiveMigrationQuestioner( + specified_apps=app_labels, + dry_run=self.dry_run, + prompt_output=self.log_output, + ) + else: + questioner = NonInteractiveMigrationQuestioner( + specified_apps=app_labels, + dry_run=self.dry_run, + verbosity=self.verbosity, + log=self.log, + ) + # Set up autodetector + autodetector = MigrationAutodetector( + loader.project_state(), + ProjectState.from_apps(apps), + questioner, + ) + + # If they want to make an empty migration, make one for each app + if self.empty: + if not app_labels: + raise CommandError( + "You must supply at least one app label when using --empty." + ) + # Make a fake changes() result we can pass to arrange_for_graph + changes = {app: [Migration("custom", app)] for app in app_labels} + changes = autodetector.arrange_for_graph( + changes=changes, + graph=loader.graph, + migration_name=self.migration_name, + ) + self.write_migration_files(changes) + return + + # Detect changes + changes = autodetector.changes( + graph=loader.graph, + trim_to_apps=app_labels or None, + convert_apps=app_labels or None, + migration_name=self.migration_name, + ) + + if not changes: + # No changes? Tell them. + if self.verbosity >= 1: + if app_labels: + if len(app_labels) == 1: + self.log("No changes detected in app '%s'" % app_labels.pop()) + else: + self.log( + "No changes detected in apps '%s'" + % ("', '".join(app_labels)) + ) + else: + self.log("No changes detected") + else: + if check_changes: + sys.exit(1) + if self.update: + self.write_to_last_migration_files(changes) + else: + self.write_migration_files(changes) + + def write_to_last_migration_files(self, changes): + loader = MigrationLoader(connections[DEFAULT_DB_ALIAS]) + new_changes = {} + update_previous_migration_paths = {} + for app_label, app_migrations in changes.items(): + # Find last migration. 
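+            # Leaf nodes are migrations no other migration depends on, e.g.
+            # [("myapp", "0003_auto")] ("myapp" is an illustrative label).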
+ leaf_migration_nodes = loader.graph.leaf_nodes(app=app_label) + if len(leaf_migration_nodes) == 0: + raise CommandError( + f"App {app_label} has no migration, cannot update last migration." + ) + leaf_migration_node = leaf_migration_nodes[0] + # Multiple leaf nodes have already been checked earlier in command. + leaf_migration = loader.graph.nodes[leaf_migration_node] + # Updated migration cannot be a squash migration, a dependency of + # another migration, and cannot be already applied. + if leaf_migration.replaces: + raise CommandError( + f"Cannot update squash migration '{leaf_migration}'." + ) + if leaf_migration_node in loader.applied_migrations: + raise CommandError( + f"Cannot update applied migration '{leaf_migration}'." + ) + depending_migrations = [ + migration + for migration in loader.disk_migrations.values() + if leaf_migration_node in migration.dependencies + ] + if depending_migrations: + formatted_migrations = ", ".join( + [f"'{migration}'" for migration in depending_migrations] + ) + raise CommandError( + f"Cannot update migration '{leaf_migration}' that migrations " + f"{formatted_migrations} depend on." + ) + # Build new migration. + for migration in app_migrations: + leaf_migration.operations.extend(migration.operations) + + for dependency in migration.dependencies: + if isinstance(dependency, SwappableTuple): + if settings.AUTH_USER_MODEL == dependency.setting: + leaf_migration.dependencies.append( + ("__setting__", "AUTH_USER_MODEL") + ) + else: + leaf_migration.dependencies.append(dependency) + elif dependency[0] != migration.app_label: + leaf_migration.dependencies.append(dependency) + # Optimize migration. + optimizer = MigrationOptimizer() + leaf_migration.operations = optimizer.optimize( + leaf_migration.operations, app_label + ) + # Update name. + previous_migration_path = MigrationWriter(leaf_migration).path + name_fragment = self.migration_name or leaf_migration.suggest_name() + suggested_name = leaf_migration.name[:4] + f"_{name_fragment}" + if leaf_migration.name == suggested_name: + new_name = leaf_migration.name + "_updated" + else: + new_name = suggested_name + leaf_migration.name = new_name + # Register overridden migration. + new_changes[app_label] = [leaf_migration] + update_previous_migration_paths[app_label] = previous_migration_path + + self.write_migration_files(new_changes, update_previous_migration_paths) + + def write_migration_files(self, changes, update_previous_migration_paths=None): + """ + Take a changes dict and write them out as migration files. + """ + directory_created = {} + for app_label, app_migrations in changes.items(): + if self.verbosity >= 1: + self.log(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label)) + for migration in app_migrations: + # Describe the migration + writer = MigrationWriter(migration, self.include_header) + if self.verbosity >= 1: + # Display a relative path if it's below the current working + # directory, or an absolute path otherwise. + migration_string = self.get_relative_path(writer.path) + self.log(" %s\n" % self.style.MIGRATE_LABEL(migration_string)) + for operation in migration.operations: + self.log(" - %s" % operation.describe()) + if self.scriptable: + self.stdout.write(migration_string) + if not self.dry_run: + # Write the migrations file to the disk. 
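+                    # On the first write for an app, ensure the migrations
+                    # package exists (directory plus __init__.py).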
+ migrations_directory = os.path.dirname(writer.path) + if not directory_created.get(app_label): + os.makedirs(migrations_directory, exist_ok=True) + init_path = os.path.join(migrations_directory, "__init__.py") + if not os.path.isfile(init_path): + open(init_path, "w").close() + # We just do this once per app + directory_created[app_label] = True + migration_string = writer.as_string() + with open(writer.path, "w", encoding="utf-8") as fh: + fh.write(migration_string) + self.written_files.append(writer.path) + if update_previous_migration_paths: + prev_path = update_previous_migration_paths[app_label] + rel_prev_path = self.get_relative_path(prev_path) + if writer.needs_manual_porting: + migration_path = self.get_relative_path(writer.path) + self.log( + self.style.WARNING( + f"Updated migration {migration_path} requires " + f"manual porting.\n" + f"Previous migration {rel_prev_path} was kept and " + f"must be deleted after porting functions manually." + ) + ) + else: + os.remove(prev_path) + self.log(f"Deleted {rel_prev_path}") + elif self.verbosity == 3: + # Alternatively, makemigrations --dry-run --verbosity 3 + # will log the migrations rather than saving the file to + # the disk. + self.log( + self.style.MIGRATE_HEADING( + "Full migrations file '%s':" % writer.filename + ) + ) + self.log(writer.as_string()) + run_formatters(self.written_files) + + @staticmethod + def get_relative_path(path): + try: + migration_string = os.path.relpath(path) + except ValueError: + migration_string = path + if migration_string.startswith(".."): + migration_string = path + return migration_string + + def handle_merge(self, loader, conflicts): + """ + Handles merging together conflicted migrations interactively, + if it's safe; otherwise, advises on how to fix it. + """ + if self.interactive: + questioner = InteractiveMigrationQuestioner(prompt_output=self.log_output) + else: + questioner = MigrationQuestioner(defaults={"ask_merge": True}) + + for app_label, migration_names in conflicts.items(): + # Grab out the migrations in question, and work out their + # common ancestor. + merge_migrations = [] + for migration_name in migration_names: + migration = loader.get_migration(app_label, migration_name) + migration.ancestry = [ + mig + for mig in loader.graph.forwards_plan((app_label, migration_name)) + if mig[0] == migration.app_label + ] + merge_migrations.append(migration) + + def all_items_equal(seq): + return all(item == seq[0] for item in seq[1:]) + + merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations)) + common_ancestor_count = sum( + 1 + for common_ancestor_generation in takewhile( + all_items_equal, merge_migrations_generations + ) + ) + if not common_ancestor_count: + raise ValueError( + "Could not find common ancestor of %s" % migration_names + ) + # Now work out the operations along each divergent branch + for migration in merge_migrations: + migration.branch = migration.ancestry[common_ancestor_count:] + migrations_ops = ( + loader.get_migration(node_app, node_name).operations + for node_app, node_name in migration.branch + ) + migration.merged_operations = sum(migrations_ops, []) + # In future, this could use some of the Optimizer code + # (can_optimize_through) to automatically see if they're + # mergeable. For now, we always just prompt the user. 
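+            # Show each divergent branch and its operations before prompting
+            # the user to confirm the merge.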
+ if self.verbosity > 0: + self.log(self.style.MIGRATE_HEADING("Merging %s" % app_label)) + for migration in merge_migrations: + self.log(self.style.MIGRATE_LABEL(" Branch %s" % migration.name)) + for operation in migration.merged_operations: + self.log(" - %s" % operation.describe()) + if questioner.ask_merge(app_label): + # If they still want to merge it, then write out an empty + # file depending on the migrations needing merging. + numbers = [ + MigrationAutodetector.parse_number(migration.name) + for migration in merge_migrations + ] + try: + biggest_number = max(x for x in numbers if x is not None) + except ValueError: + biggest_number = 1 + subclass = type( + "Migration", + (Migration,), + { + "dependencies": [ + (app_label, migration.name) + for migration in merge_migrations + ], + }, + ) + parts = ["%04i" % (biggest_number + 1)] + if self.migration_name: + parts.append(self.migration_name) + else: + parts.append("merge") + leaf_names = "_".join( + sorted(migration.name for migration in merge_migrations) + ) + if len(leaf_names) > 47: + parts.append(get_migration_name_timestamp()) + else: + parts.append(leaf_names) + migration_name = "_".join(parts) + new_migration = subclass(migration_name, app_label) + writer = MigrationWriter(new_migration, self.include_header) + + if not self.dry_run: + # Write the merge migrations file to the disk + with open(writer.path, "w", encoding="utf-8") as fh: + fh.write(writer.as_string()) + run_formatters([writer.path]) + if self.verbosity > 0: + self.log("\nCreated new merge migration %s" % writer.path) + if self.scriptable: + self.stdout.write(writer.path) + elif self.verbosity == 3: + # Alternatively, makemigrations --merge --dry-run --verbosity 3 + # will log the merge migrations rather than saving the file + # to the disk. + self.log( + self.style.MIGRATE_HEADING( + "Full merge migrations file '%s':" % writer.filename + ) + ) + self.log(writer.as_string()) diff --git a/testbed/django__django/django/core/management/commands/migrate.py b/testbed/django__django/django/core/management/commands/migrate.py new file mode 100644 index 0000000000000000000000000000000000000000..1541843066ccee68d94328437586e5adce7825e6 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/migrate.py @@ -0,0 +1,511 @@ +import sys +import time +from importlib import import_module + +from django.apps import apps +from django.core.management.base import BaseCommand, CommandError, no_translations +from django.core.management.sql import emit_post_migrate_signal, emit_pre_migrate_signal +from django.db import DEFAULT_DB_ALIAS, connections, router +from django.db.migrations.autodetector import MigrationAutodetector +from django.db.migrations.executor import MigrationExecutor +from django.db.migrations.loader import AmbiguityError +from django.db.migrations.state import ModelState, ProjectState +from django.utils.module_loading import module_has_submodule +from django.utils.text import Truncator + + +class Command(BaseCommand): + help = ( + "Updates database schema. Manages both apps with migrations and those without." + ) + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument( + "--skip-checks", + action="store_true", + help="Skip system checks.", + ) + parser.add_argument( + "app_label", + nargs="?", + help="App label of an application to synchronize the state.", + ) + parser.add_argument( + "migration_name", + nargs="?", + help="Database state will be brought to the state after that " + 'migration. 
Use the name "zero" to unapply all migrations.', + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + 'Nominates a database to synchronize. Defaults to the "default" ' + "database." + ), + ) + parser.add_argument( + "--fake", + action="store_true", + help="Mark migrations as run without actually running them.", + ) + parser.add_argument( + "--fake-initial", + action="store_true", + help=( + "Detect if tables already exist and fake-apply initial migrations if " + "so. Make sure that the current database schema matches your initial " + "migration before using this flag. Django will only check for an " + "existing table name." + ), + ) + parser.add_argument( + "--plan", + action="store_true", + help="Shows a list of the migration actions that will be performed.", + ) + parser.add_argument( + "--run-syncdb", + action="store_true", + help="Creates tables for apps without migrations.", + ) + parser.add_argument( + "--check", + action="store_true", + dest="check_unapplied", + help=( + "Exits with a non-zero status if unapplied migrations exist and does " + "not actually apply migrations." + ), + ) + parser.add_argument( + "--prune", + action="store_true", + dest="prune", + help="Delete nonexistent migrations from the django_migrations table.", + ) + + @no_translations + def handle(self, *args, **options): + database = options["database"] + if not options["skip_checks"]: + self.check(databases=[database]) + + self.verbosity = options["verbosity"] + self.interactive = options["interactive"] + + # Import the 'management' module within each installed app, to register + # dispatcher events. + for app_config in apps.get_app_configs(): + if module_has_submodule(app_config.module, "management"): + import_module(".management", app_config.name) + + # Get the database we're operating from + connection = connections[database] + + # Hook for backends needing any database preparation + connection.prepare_database() + # Work out which apps have migrations and which do not + executor = MigrationExecutor(connection, self.migration_progress_callback) + + # Raise an error if any migrations are applied before their dependencies. + executor.loader.check_consistent_history(connection) + + # Before anything else, see if there's conflicting apps and drop out + # hard if there are any + conflicts = executor.loader.detect_conflicts() + if conflicts: + name_str = "; ".join( + "%s in %s" % (", ".join(names), app) for app, names in conflicts.items() + ) + raise CommandError( + "Conflicting migrations detected; multiple leaf nodes in the " + "migration graph: (%s).\nTo fix them run " + "'python manage.py makemigrations --merge'" % name_str + ) + + # If they supplied command line arguments, work out what they mean. + run_syncdb = options["run_syncdb"] + target_app_labels_only = True + if options["app_label"]: + # Validate app_label. + app_label = options["app_label"] + try: + apps.get_app_config(app_label) + except LookupError as err: + raise CommandError(str(err)) + if run_syncdb: + if app_label in executor.loader.migrated_apps: + raise CommandError( + "Can't use run_syncdb with app '%s' as it has migrations." + % app_label + ) + elif app_label not in executor.loader.migrated_apps: + raise CommandError("App '%s' does not have migrations." 
% app_label) + + if options["app_label"] and options["migration_name"]: + migration_name = options["migration_name"] + if migration_name == "zero": + targets = [(app_label, None)] + else: + try: + migration = executor.loader.get_migration_by_prefix( + app_label, migration_name + ) + except AmbiguityError: + raise CommandError( + "More than one migration matches '%s' in app '%s'. " + "Please be more specific." % (migration_name, app_label) + ) + except KeyError: + raise CommandError( + "Cannot find a migration matching '%s' from app '%s'." + % (migration_name, app_label) + ) + target = (app_label, migration.name) + # Partially applied squashed migrations are not included in the + # graph, use the last replacement instead. + if ( + target not in executor.loader.graph.nodes + and target in executor.loader.replacements + ): + incomplete_migration = executor.loader.replacements[target] + target = incomplete_migration.replaces[-1] + targets = [target] + target_app_labels_only = False + elif options["app_label"]: + targets = [ + key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label + ] + else: + targets = executor.loader.graph.leaf_nodes() + + if options["prune"]: + if not options["app_label"]: + raise CommandError( + "Migrations can be pruned only when an app is specified." + ) + if self.verbosity > 0: + self.stdout.write("Pruning migrations:", self.style.MIGRATE_HEADING) + to_prune = set(executor.loader.applied_migrations) - set( + executor.loader.disk_migrations + ) + squashed_migrations_with_deleted_replaced_migrations = [ + migration_key + for migration_key, migration_obj in executor.loader.replacements.items() + if any(replaced in to_prune for replaced in migration_obj.replaces) + ] + if squashed_migrations_with_deleted_replaced_migrations: + self.stdout.write( + self.style.NOTICE( + " Cannot use --prune because the following squashed " + "migrations have their 'replaces' attributes and may not " + "be recorded as applied:" + ) + ) + for migration in squashed_migrations_with_deleted_replaced_migrations: + app, name = migration + self.stdout.write(f" {app}.{name}") + self.stdout.write( + self.style.NOTICE( + " Re-run 'manage.py migrate' if they are not marked as " + "applied, and remove 'replaces' attributes in their " + "Migration classes." + ) + ) + else: + to_prune = sorted( + migration for migration in to_prune if migration[0] == app_label + ) + if to_prune: + for migration in to_prune: + app, name = migration + if self.verbosity > 0: + self.stdout.write( + self.style.MIGRATE_LABEL(f" Pruning {app}.{name}"), + ending="", + ) + executor.recorder.record_unapplied(app, name) + if self.verbosity > 0: + self.stdout.write(self.style.SUCCESS(" OK")) + elif self.verbosity > 0: + self.stdout.write(" No migrations to prune.") + + plan = executor.migration_plan(targets) + + if options["plan"]: + self.stdout.write("Planned operations:", self.style.MIGRATE_LABEL) + if not plan: + self.stdout.write(" No planned migration operations.") + else: + for migration, backwards in plan: + self.stdout.write(str(migration), self.style.MIGRATE_HEADING) + for operation in migration.operations: + message, is_error = self.describe_operation( + operation, backwards + ) + style = self.style.WARNING if is_error else None + self.stdout.write(" " + message, style) + if options["check_unapplied"]: + sys.exit(1) + return + if options["check_unapplied"]: + if plan: + sys.exit(1) + return + if options["prune"]: + return + + # At this point, ignore run_syncdb if there aren't any apps to sync. 
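+        # executor.loader.unmigrated_apps is the set of installed app labels
+        # that have no migrations on disk.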
+ run_syncdb = options["run_syncdb"] and executor.loader.unmigrated_apps + # Print some useful info + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:")) + if run_syncdb: + if options["app_label"]: + self.stdout.write( + self.style.MIGRATE_LABEL( + " Synchronize unmigrated app: %s" % app_label + ) + ) + else: + self.stdout.write( + self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") + + (", ".join(sorted(executor.loader.unmigrated_apps))) + ) + if target_app_labels_only: + self.stdout.write( + self.style.MIGRATE_LABEL(" Apply all migrations: ") + + (", ".join(sorted({a for a, n in targets})) or "(none)") + ) + else: + if targets[0][1] is None: + self.stdout.write( + self.style.MIGRATE_LABEL(" Unapply all migrations: ") + + str(targets[0][0]) + ) + else: + self.stdout.write( + self.style.MIGRATE_LABEL(" Target specific migration: ") + + "%s, from %s" % (targets[0][1], targets[0][0]) + ) + + pre_migrate_state = executor._create_project_state(with_applied_migrations=True) + pre_migrate_apps = pre_migrate_state.apps + emit_pre_migrate_signal( + self.verbosity, + self.interactive, + connection.alias, + stdout=self.stdout, + apps=pre_migrate_apps, + plan=plan, + ) + + # Run the syncdb phase. + if run_syncdb: + if self.verbosity >= 1: + self.stdout.write( + self.style.MIGRATE_HEADING("Synchronizing apps without migrations:") + ) + if options["app_label"]: + self.sync_apps(connection, [app_label]) + else: + self.sync_apps(connection, executor.loader.unmigrated_apps) + + # Migrate! + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:")) + if not plan: + if self.verbosity >= 1: + self.stdout.write(" No migrations to apply.") + # If there's changes that aren't in migrations yet, tell them + # how to fix it. + autodetector = MigrationAutodetector( + executor.loader.project_state(), + ProjectState.from_apps(apps), + ) + changes = autodetector.changes(graph=executor.loader.graph) + if changes: + self.stdout.write( + self.style.NOTICE( + " Your models in app(s): %s have changes that are not " + "yet reflected in a migration, and so won't be " + "applied." % ", ".join(repr(app) for app in sorted(changes)) + ) + ) + self.stdout.write( + self.style.NOTICE( + " Run 'manage.py makemigrations' to make new " + "migrations, and then re-run 'manage.py migrate' to " + "apply them." + ) + ) + fake = False + fake_initial = False + else: + fake = options["fake"] + fake_initial = options["fake_initial"] + post_migrate_state = executor.migrate( + targets, + plan=plan, + state=pre_migrate_state.clone(), + fake=fake, + fake_initial=fake_initial, + ) + # post_migrate signals have access to all models. Ensure that all models + # are reloaded in case any are delayed. + post_migrate_state.clear_delayed_apps_cache() + post_migrate_apps = post_migrate_state.apps + + # Re-render models of real apps to include relationships now that + # we've got a final state. This wouldn't be necessary if real apps + # models were rendered with relationships in the first place. + with post_migrate_apps.bulk_update(): + model_keys = [] + for model_state in post_migrate_apps.real_models: + model_key = model_state.app_label, model_state.name_lower + model_keys.append(model_key) + post_migrate_apps.unregister_model(*model_key) + post_migrate_apps.render_multiple( + [ModelState.from_model(apps.get_model(*model)) for model in model_keys] + ) + + # Send the post_migrate signal, so individual apps can do whatever they need + # to do at this point. 
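+        # post_migrate receivers (for example, contenttypes and auth
+        # permission creation) run against the fully rendered app registry.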
+ emit_post_migrate_signal( + self.verbosity, + self.interactive, + connection.alias, + stdout=self.stdout, + apps=post_migrate_apps, + plan=plan, + ) + + def migration_progress_callback(self, action, migration=None, fake=False): + if self.verbosity >= 1: + compute_time = self.verbosity > 1 + if action == "apply_start": + if compute_time: + self.start = time.monotonic() + self.stdout.write(" Applying %s..." % migration, ending="") + self.stdout.flush() + elif action == "apply_success": + elapsed = ( + " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" + ) + if fake: + self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed)) + else: + self.stdout.write(self.style.SUCCESS(" OK" + elapsed)) + elif action == "unapply_start": + if compute_time: + self.start = time.monotonic() + self.stdout.write(" Unapplying %s..." % migration, ending="") + self.stdout.flush() + elif action == "unapply_success": + elapsed = ( + " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" + ) + if fake: + self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed)) + else: + self.stdout.write(self.style.SUCCESS(" OK" + elapsed)) + elif action == "render_start": + if compute_time: + self.start = time.monotonic() + self.stdout.write(" Rendering model states...", ending="") + self.stdout.flush() + elif action == "render_success": + elapsed = ( + " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" + ) + self.stdout.write(self.style.SUCCESS(" DONE" + elapsed)) + + def sync_apps(self, connection, app_labels): + """Run the old syncdb-style operation on a list of app_labels.""" + with connection.cursor() as cursor: + tables = connection.introspection.table_names(cursor) + + # Build the manifest of apps and models that are to be synchronized. + all_models = [ + ( + app_config.label, + router.get_migratable_models( + app_config, connection.alias, include_auto_created=False + ), + ) + for app_config in apps.get_app_configs() + if app_config.models_module is not None and app_config.label in app_labels + ] + + def model_installed(model): + opts = model._meta + converter = connection.introspection.identifier_converter + return not ( + (converter(opts.db_table) in tables) + or ( + opts.auto_created + and converter(opts.auto_created._meta.db_table) in tables + ) + ) + + manifest = { + app_name: list(filter(model_installed, model_list)) + for app_name, model_list in all_models + } + + # Create the tables for each model + if self.verbosity >= 1: + self.stdout.write(" Creating tables...") + with connection.schema_editor() as editor: + for app_name, model_list in manifest.items(): + for model in model_list: + # Never install unmanaged models, etc. + if not model._meta.can_migrate(connection): + continue + if self.verbosity >= 3: + self.stdout.write( + " Processing %s.%s model" + % (app_name, model._meta.object_name) + ) + if self.verbosity >= 1: + self.stdout.write( + " Creating table %s" % model._meta.db_table + ) + editor.create_model(model) + + # Deferred SQL is executed when exiting the editor's context. 
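+            # Deferred SQL typically contains index and foreign-key
+            # constraint statements batched by the schema editor.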
+ if self.verbosity >= 1: + self.stdout.write(" Running deferred SQL...") + + @staticmethod + def describe_operation(operation, backwards): + """Return a string that describes a migration operation for --plan.""" + prefix = "" + is_error = False + if hasattr(operation, "code"): + code = operation.reverse_code if backwards else operation.code + action = (code.__doc__ or "") if code else None + elif hasattr(operation, "sql"): + action = operation.reverse_sql if backwards else operation.sql + else: + action = "" + if backwards: + prefix = "Undo " + if action is not None: + action = str(action).replace("\n", "") + elif backwards: + action = "IRREVERSIBLE" + is_error = True + if action: + action = " -> " + action + truncated = Truncator(action) + return prefix + operation.describe() + truncated.chars(40), is_error diff --git a/testbed/django__django/django/core/management/commands/optimizemigration.py b/testbed/django__django/django/core/management/commands/optimizemigration.py new file mode 100644 index 0000000000000000000000000000000000000000..2064dfbf3cba3647b72e0f1fccad57228685913e --- /dev/null +++ b/testbed/django__django/django/core/management/commands/optimizemigration.py @@ -0,0 +1,129 @@ +import shutil +import sys + +from django.apps import apps +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import run_formatters +from django.db import migrations +from django.db.migrations.exceptions import AmbiguityError +from django.db.migrations.loader import MigrationLoader +from django.db.migrations.optimizer import MigrationOptimizer +from django.db.migrations.writer import MigrationWriter +from django.utils.version import get_docs_version + + +class Command(BaseCommand): + help = "Optimizes the operations for the named migration." + + def add_arguments(self, parser): + parser.add_argument( + "app_label", + help="App label of the application to optimize the migration for.", + ) + parser.add_argument( + "migration_name", help="Migration name to optimize the operations for." + ) + parser.add_argument( + "--check", + action="store_true", + help="Exit with a non-zero status if the migration can be optimized.", + ) + + def handle(self, *args, **options): + verbosity = options["verbosity"] + app_label = options["app_label"] + migration_name = options["migration_name"] + check = options["check"] + + # Validate app_label. + try: + apps.get_app_config(app_label) + except LookupError as err: + raise CommandError(str(err)) + + # Load the current graph state. + loader = MigrationLoader(None) + if app_label not in loader.migrated_apps: + raise CommandError(f"App '{app_label}' does not have migrations.") + # Find a migration. + try: + migration = loader.get_migration_by_prefix(app_label, migration_name) + except AmbiguityError: + raise CommandError( + f"More than one migration matches '{migration_name}' in app " + f"'{app_label}'. Please be more specific." + ) + except KeyError: + raise CommandError( + f"Cannot find a migration matching '{migration_name}' from app " + f"'{app_label}'." + ) + + # Optimize the migration. + optimizer = MigrationOptimizer() + new_operations = optimizer.optimize(migration.operations, migration.app_label) + if len(migration.operations) == len(new_operations): + if verbosity > 0: + self.stdout.write("No optimizations possible.") + return + else: + if verbosity > 0: + self.stdout.write( + "Optimizing from %d operations to %d operations." 
% (len(migration.operations), len(new_operations))
+                )
+            if check:
+                sys.exit(1)
+
+        # Set the new migration optimizations.
+        migration.operations = new_operations
+
+        # Write out the optimized migration file.
+        writer = MigrationWriter(migration)
+        migration_file_string = writer.as_string()
+        if writer.needs_manual_porting:
+            if migration.replaces:
+                raise CommandError(
+                    "Migration will require manual porting but is already a squashed "
+                    "migration.\nTransition to a normal migration first: "
+                    "https://docs.djangoproject.com/en/%s/topics/migrations/"
+                    "#squashing-migrations" % get_docs_version()
+                )
+            # Make a new migration with those operations.
+            subclass = type(
+                "Migration",
+                (migrations.Migration,),
+                {
+                    "dependencies": migration.dependencies,
+                    "operations": new_operations,
+                    "replaces": [(migration.app_label, migration.name)],
+                },
+            )
+            optimized_migration_name = "%s_optimized" % migration.name
+            optimized_migration = subclass(optimized_migration_name, app_label)
+            writer = MigrationWriter(optimized_migration)
+            migration_file_string = writer.as_string()
+            if verbosity > 0:
+                self.stdout.write(
+                    self.style.MIGRATE_HEADING("Manual porting required") + "\n"
+                    "  Your migrations contained functions that must be manually "
+                    "copied over,\n"
+                    "  as we could not safely copy their implementation.\n"
+                    "  See the comment at the top of the optimized migration for "
+                    "details."
+                )
+                if shutil.which("black"):
+                    self.stdout.write(
+                        self.style.WARNING(
+                            "Optimized migration couldn't be formatted using the "
+                            '"black" command. You can call it manually.'
+                        )
+                    )
+        with open(writer.path, "w", encoding="utf-8") as fh:
+            fh.write(migration_file_string)
+        run_formatters([writer.path])
+
+        if verbosity > 0:
+            self.stdout.write(
+                self.style.MIGRATE_HEADING(f"Optimized migration {writer.path}")
+            )
diff --git a/testbed/django__django/django/core/management/commands/runserver.py b/testbed/django__django/django/core/management/commands/runserver.py
new file mode 100644
index 0000000000000000000000000000000000000000..26bbf29d680ca1c437f2f6ff14954dc52f2624dd
--- /dev/null
+++ b/testbed/django__django/django/core/management/commands/runserver.py
@@ -0,0 +1,186 @@
+import errno
+import os
+import re
+import socket
+import sys
+from datetime import datetime
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+from django.core.servers.basehttp import WSGIServer, get_internal_wsgi_application, run
+from django.utils import autoreload
+from django.utils.regex_helper import _lazy_re_compile
+
+naiveip_re = _lazy_re_compile(
+    r"""^(?:
+(?P<addr>
+    (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) |         # IPv4 address
+    (?P<ipv6>\[[a-fA-F0-9:]+\]) |               # IPv6 address
+    (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
+):)?(?P<port>\d+)$""",
+    re.X,
+)
+
+
+class Command(BaseCommand):
+    help = "Starts a lightweight web server for development."
+
+    # Validation is called explicitly each time the server is reloaded.
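+    # addrport values accepted by naiveip_re, for illustration:
+    #   "8000", "127.0.0.1:8000", "[::1]:8000", "localhost:8000"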
+ requires_system_checks = [] + stealth_options = ("shutdown_message",) + suppressed_base_arguments = {"--verbosity", "--traceback"} + + default_addr = "127.0.0.1" + default_addr_ipv6 = "::1" + default_port = "8000" + protocol = "http" + server_cls = WSGIServer + + def add_arguments(self, parser): + parser.add_argument( + "addrport", nargs="?", help="Optional port number, or ipaddr:port" + ) + parser.add_argument( + "--ipv6", + "-6", + action="store_true", + dest="use_ipv6", + help="Tells Django to use an IPv6 address.", + ) + parser.add_argument( + "--nothreading", + action="store_false", + dest="use_threading", + help="Tells Django to NOT use threading.", + ) + parser.add_argument( + "--noreload", + action="store_false", + dest="use_reloader", + help="Tells Django to NOT use the auto-reloader.", + ) + parser.add_argument( + "--skip-checks", + action="store_true", + help="Skip system checks.", + ) + + def execute(self, *args, **options): + if options["no_color"]: + # We rely on the environment because it's currently the only + # way to reach WSGIRequestHandler. This seems an acceptable + # compromise considering `runserver` runs indefinitely. + os.environ["DJANGO_COLORS"] = "nocolor" + super().execute(*args, **options) + + def get_handler(self, *args, **options): + """Return the default WSGI handler for the runner.""" + return get_internal_wsgi_application() + + def handle(self, *args, **options): + if not settings.DEBUG and not settings.ALLOWED_HOSTS: + raise CommandError("You must set settings.ALLOWED_HOSTS if DEBUG is False.") + + self.use_ipv6 = options["use_ipv6"] + if self.use_ipv6 and not socket.has_ipv6: + raise CommandError("Your Python does not support IPv6.") + self._raw_ipv6 = False + if not options["addrport"]: + self.addr = "" + self.port = self.default_port + else: + m = re.match(naiveip_re, options["addrport"]) + if m is None: + raise CommandError( + '"%s" is not a valid port number ' + "or address:port pair." % options["addrport"] + ) + self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups() + if not self.port.isdigit(): + raise CommandError("%r is not a valid port number." % self.port) + if self.addr: + if _ipv6: + self.addr = self.addr[1:-1] + self.use_ipv6 = True + self._raw_ipv6 = True + elif self.use_ipv6 and not _fqdn: + raise CommandError('"%s" is not a valid IPv6 address.' % self.addr) + if not self.addr: + self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr + self._raw_ipv6 = self.use_ipv6 + self.run(**options) + + def run(self, **options): + """Run the server, using the autoreloader if needed.""" + use_reloader = options["use_reloader"] + + if use_reloader: + autoreload.run_with_reloader(self.inner_run, **options) + else: + self.inner_run(None, **options) + + def inner_run(self, *args, **options): + # If an exception was silenced in ManagementUtility.execute in order + # to be raised in the child process, raise it now. + autoreload.raise_last_exception() + + threading = options["use_threading"] + # 'shutdown_message' is a stealth option. + shutdown_message = options.get("shutdown_message", "") + + if not options["skip_checks"]: + self.stdout.write("Performing system checks...\n\n") + self.check(display_num_errors=True) + # Need to check migrations here, so can't use the + # requires_migrations_check attribute. 
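+        # (check_migrations() writes a NOTICE about any unapplied migrations
+        # to stdout; it warns but does not stop the server from starting.)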
+ self.check_migrations() + + try: + handler = self.get_handler(*args, **options) + run( + self.addr, + int(self.port), + handler, + ipv6=self.use_ipv6, + threading=threading, + on_bind=self.on_bind, + server_cls=self.server_cls, + ) + except OSError as e: + # Use helpful error messages instead of ugly tracebacks. + ERRORS = { + errno.EACCES: "You don't have permission to access that port.", + errno.EADDRINUSE: "That port is already in use.", + errno.EADDRNOTAVAIL: "That IP address can't be assigned to.", + } + try: + error_text = ERRORS[e.errno] + except KeyError: + error_text = e + self.stderr.write("Error: %s" % error_text) + # Need to use an OS exit because sys.exit doesn't work in a thread + os._exit(1) + except KeyboardInterrupt: + if shutdown_message: + self.stdout.write(shutdown_message) + sys.exit(0) + + def on_bind(self, server_port): + quit_command = "CTRL-BREAK" if sys.platform == "win32" else "CONTROL-C" + + if self._raw_ipv6: + addr = f"[{self.addr}]" + elif self.addr == "0": + addr = "0.0.0.0" + else: + addr = self.addr + + now = datetime.now().strftime("%B %d, %Y - %X") + version = self.get_version() + print( + f"{now}\n" + f"Django version {version}, using settings {settings.SETTINGS_MODULE!r}\n" + f"Starting development server at {self.protocol}://{addr}:{server_port}/\n" + f"Quit the server with {quit_command}.", + file=self.stdout, + ) diff --git a/testbed/django__django/django/core/management/commands/shell.py b/testbed/django__django/django/core/management/commands/shell.py new file mode 100644 index 0000000000000000000000000000000000000000..f55b346406aa87ec31bc1f3248f6c10f9ade72df --- /dev/null +++ b/testbed/django__django/django/core/management/commands/shell.py @@ -0,0 +1,139 @@ +import os +import select +import sys +import traceback + +from django.core.management import BaseCommand, CommandError +from django.utils.datastructures import OrderedSet + + +class Command(BaseCommand): + help = ( + "Runs a Python interactive interpreter. Tries to use IPython or " + "bpython, if one of them is available. Any standard input is executed " + "as code." + ) + + requires_system_checks = [] + shells = ["ipython", "bpython", "python"] + + def add_arguments(self, parser): + parser.add_argument( + "--no-startup", + action="store_true", + help=( + "When using plain Python, ignore the PYTHONSTARTUP environment " + "variable and ~/.pythonrc.py script." + ), + ) + parser.add_argument( + "-i", + "--interface", + choices=self.shells, + help=( + "Specify an interactive interpreter interface. Available options: " + '"ipython", "bpython", and "python"' + ), + ) + parser.add_argument( + "-c", + "--command", + help=( + "Instead of opening an interactive shell, run a command as Django and " + "exit." + ), + ) + + def ipython(self, options): + from IPython import start_ipython + + start_ipython(argv=[]) + + def bpython(self, options): + import bpython + + bpython.embed() + + def python(self, options): + import code + + # Set up a dictionary to serve as the environment for the shell. + imported_objects = {} + + # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system + # conventions and get $PYTHONSTARTUP first then .pythonrc.py. 
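+        # For example, a user might export PYTHONSTARTUP=~/.startup.py in
+        # their shell profile (illustrative path); that script is executed
+        # below before the interactive prompt appears.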
+ if not options["no_startup"]: + for pythonrc in OrderedSet( + [os.environ.get("PYTHONSTARTUP"), os.path.expanduser("~/.pythonrc.py")] + ): + if not pythonrc: + continue + if not os.path.isfile(pythonrc): + continue + with open(pythonrc) as handle: + pythonrc_code = handle.read() + # Match the behavior of the cpython shell where an error in + # PYTHONSTARTUP prints an exception and continues. + try: + exec(compile(pythonrc_code, pythonrc, "exec"), imported_objects) + except Exception: + traceback.print_exc() + + # By default, this will set up readline to do tab completion and to read and + # write history to the .python_history file, but this can be overridden by + # $PYTHONSTARTUP or ~/.pythonrc.py. + try: + hook = sys.__interactivehook__ + except AttributeError: + # Match the behavior of the cpython shell where a missing + # sys.__interactivehook__ is ignored. + pass + else: + try: + hook() + except Exception: + # Match the behavior of the cpython shell where an error in + # sys.__interactivehook__ prints a warning and the exception + # and continues. + print("Failed calling sys.__interactivehook__") + traceback.print_exc() + + # Set up tab completion for objects imported by $PYTHONSTARTUP or + # ~/.pythonrc.py. + try: + import readline + import rlcompleter + + readline.set_completer(rlcompleter.Completer(imported_objects).complete) + except ImportError: + pass + + # Start the interactive interpreter. + code.interact(local=imported_objects) + + def handle(self, **options): + # Execute the command and exit. + if options["command"]: + exec(options["command"], globals()) + return + + # Execute stdin if it has anything to read and exit. + # Not supported on Windows due to select.select() limitations. + if ( + sys.platform != "win32" + and not sys.stdin.isatty() + and select.select([sys.stdin], [], [], 0)[0] + ): + exec(sys.stdin.read(), globals()) + return + + available_shells = ( + [options["interface"]] if options["interface"] else self.shells + ) + + for shell in available_shells: + try: + return getattr(self, shell)(options) + except ImportError: + pass + raise CommandError("Couldn't import {} interface.".format(shell)) diff --git a/testbed/django__django/django/core/management/commands/showmigrations.py b/testbed/django__django/django/core/management/commands/showmigrations.py new file mode 100644 index 0000000000000000000000000000000000000000..203f92151d21d2695a252270c627dea5d1f0b37e --- /dev/null +++ b/testbed/django__django/django/core/management/commands/showmigrations.py @@ -0,0 +1,176 @@ +import sys + +from django.apps import apps +from django.core.management.base import BaseCommand +from django.db import DEFAULT_DB_ALIAS, connections +from django.db.migrations.loader import MigrationLoader +from django.db.migrations.recorder import MigrationRecorder + + +class Command(BaseCommand): + help = "Shows all available migrations for the current project" + + def add_arguments(self, parser): + parser.add_argument( + "app_label", + nargs="*", + help="App labels of applications to limit the output to.", + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + "Nominates a database to show migrations for. Defaults to the " + '"default" database.' + ), + ) + + formats = parser.add_mutually_exclusive_group() + formats.add_argument( + "--list", + "-l", + action="store_const", + dest="format", + const="list", + help=( + "Shows a list of all migrations and which are applied. " + "With a verbosity level of 2 or above, the applied datetimes " + "will be included." 
+ ), + ) + formats.add_argument( + "--plan", + "-p", + action="store_const", + dest="format", + const="plan", + help=( + "Shows all migrations in the order they will be applied. With a " + "verbosity level of 2 or above all direct migration dependencies and " + "reverse dependencies (run_before) will be included." + ), + ) + + parser.set_defaults(format="list") + + def handle(self, *args, **options): + self.verbosity = options["verbosity"] + + # Get the database we're operating from + db = options["database"] + connection = connections[db] + + if options["format"] == "plan": + return self.show_plan(connection, options["app_label"]) + else: + return self.show_list(connection, options["app_label"]) + + def _validate_app_names(self, loader, app_names): + has_bad_names = False + for app_name in app_names: + try: + apps.get_app_config(app_name) + except LookupError as err: + self.stderr.write(str(err)) + has_bad_names = True + if has_bad_names: + sys.exit(2) + + def show_list(self, connection, app_names=None): + """ + Show a list of all migrations on the system, or only those of + some named apps. + """ + # Load migrations from disk/DB + loader = MigrationLoader(connection, ignore_no_migrations=True) + recorder = MigrationRecorder(connection) + recorded_migrations = recorder.applied_migrations() + graph = loader.graph + # If we were passed a list of apps, validate it + if app_names: + self._validate_app_names(loader, app_names) + # Otherwise, show all apps in alphabetic order + else: + app_names = sorted(loader.migrated_apps) + # For each app, print its migrations in order from oldest (roots) to + # newest (leaves). + for app_name in app_names: + self.stdout.write(app_name, self.style.MIGRATE_LABEL) + shown = set() + for node in graph.leaf_nodes(app_name): + for plan_node in graph.forwards_plan(node): + if plan_node not in shown and plan_node[0] == app_name: + # Give it a nice title if it's a squashed one + title = plan_node[1] + if graph.nodes[plan_node].replaces: + title += " (%s squashed migrations)" % len( + graph.nodes[plan_node].replaces + ) + applied_migration = loader.applied_migrations.get(plan_node) + # Mark it as applied/unapplied + if applied_migration: + if plan_node in recorded_migrations: + output = " [X] %s" % title + else: + title += " Run 'manage.py migrate' to finish recording." + output = " [-] %s" % title + if self.verbosity >= 2 and hasattr( + applied_migration, "applied" + ): + output += ( + " (applied at %s)" + % applied_migration.applied.strftime( + "%Y-%m-%d %H:%M:%S" + ) + ) + self.stdout.write(output) + else: + self.stdout.write(" [ ] %s" % title) + shown.add(plan_node) + # If we didn't print anything, then a small message + if not shown: + self.stdout.write(" (no migrations)", self.style.ERROR) + + def show_plan(self, connection, app_names=None): + """ + Show all known migrations (or only those of the specified app_names) + in the order they will be applied. 
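+        Each output line is marked "[X]" if the migration is applied or
+        "[ ]" if not, e.g. "[X] myapp.0001_initial" (app and migration
+        names here are illustrative).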
+ """ + # Load migrations from disk/DB + loader = MigrationLoader(connection) + graph = loader.graph + if app_names: + self._validate_app_names(loader, app_names) + targets = [key for key in graph.leaf_nodes() if key[0] in app_names] + else: + targets = graph.leaf_nodes() + plan = [] + seen = set() + + # Generate the plan + for target in targets: + for migration in graph.forwards_plan(target): + if migration not in seen: + node = graph.node_map[migration] + plan.append(node) + seen.add(migration) + + # Output + def print_deps(node): + out = [] + for parent in sorted(node.parents): + out.append("%s.%s" % parent.key) + if out: + return " ... (%s)" % ", ".join(out) + return "" + + for node in plan: + deps = "" + if self.verbosity >= 2: + deps = print_deps(node) + if node.key in loader.applied_migrations: + self.stdout.write("[X] %s.%s%s" % (node.key[0], node.key[1], deps)) + else: + self.stdout.write("[ ] %s.%s%s" % (node.key[0], node.key[1], deps)) + if not plan: + self.stdout.write("(no migrations)", self.style.ERROR) diff --git a/testbed/django__django/django/core/management/commands/sqlflush.py b/testbed/django__django/django/core/management/commands/sqlflush.py new file mode 100644 index 0000000000000000000000000000000000000000..bc82e1f05ff90fe72fff6e6aa668fa9ada695ba5 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/sqlflush.py @@ -0,0 +1,29 @@ +from django.core.management.base import BaseCommand +from django.core.management.sql import sql_flush +from django.db import DEFAULT_DB_ALIAS, connections + + +class Command(BaseCommand): + help = ( + "Returns a list of the SQL statements required to return all tables in " + "the database to the state they were in just after they were installed." + ) + + output_transaction = True + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + 'Nominates a database to print the SQL for. Defaults to the "default" ' + "database." + ), + ) + + def handle(self, **options): + sql_statements = sql_flush(self.style, connections[options["database"]]) + if not sql_statements and options["verbosity"] >= 1: + self.stderr.write("No tables found.") + return "\n".join(sql_statements) diff --git a/testbed/django__django/django/core/management/commands/sqlmigrate.py b/testbed/django__django/django/core/management/commands/sqlmigrate.py new file mode 100644 index 0000000000000000000000000000000000000000..2f6993682f6be43c434189eb51f5b9de411538f3 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/sqlmigrate.py @@ -0,0 +1,83 @@ +from django.apps import apps +from django.core.management.base import BaseCommand, CommandError +from django.db import DEFAULT_DB_ALIAS, connections +from django.db.migrations.loader import AmbiguityError, MigrationLoader + + +class Command(BaseCommand): + help = "Prints the SQL statements for the named migration." + + output_transaction = True + + def add_arguments(self, parser): + parser.add_argument( + "app_label", help="App label of the application containing the migration." + ) + parser.add_argument( + "migration_name", help="Migration name to print the SQL for." + ) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + 'Nominates a database to create SQL for. Defaults to the "default" ' + "database." 
+ ), + ) + parser.add_argument( + "--backwards", + action="store_true", + help="Creates SQL to unapply the migration, rather than to apply it", + ) + + def execute(self, *args, **options): + # sqlmigrate doesn't support coloring its output but we need to force + # no_color=True so that the BEGIN/COMMIT statements added by + # output_transaction don't get colored either. + options["no_color"] = True + return super().execute(*args, **options) + + def handle(self, *args, **options): + # Get the database we're operating from + connection = connections[options["database"]] + + # Load up a loader to get all the migration data, but don't replace + # migrations. + loader = MigrationLoader(connection, replace_migrations=False) + + # Resolve command-line arguments into a migration + app_label, migration_name = options["app_label"], options["migration_name"] + # Validate app_label + try: + apps.get_app_config(app_label) + except LookupError as err: + raise CommandError(str(err)) + if app_label not in loader.migrated_apps: + raise CommandError("App '%s' does not have migrations" % app_label) + try: + migration = loader.get_migration_by_prefix(app_label, migration_name) + except AmbiguityError: + raise CommandError( + "More than one migration matches '%s' in app '%s'. Please be more " + "specific." % (migration_name, app_label) + ) + except KeyError: + raise CommandError( + "Cannot find a migration matching '%s' from app '%s'. Is it in " + "INSTALLED_APPS?" % (migration_name, app_label) + ) + target = (app_label, migration.name) + + # Show begin/end around output for atomic migrations, if the database + # supports transactional DDL. + self.output_transaction = ( + migration.atomic and connection.features.can_rollback_ddl + ) + + # Make a plan that represents just the requested migrations and show SQL + # for it + plan = [(loader.graph.nodes[target], options["backwards"])] + sql_statements = loader.collect_sql(plan) + if not sql_statements and options["verbosity"] >= 1: + self.stderr.write("No operations found.") + return "\n".join(sql_statements) diff --git a/testbed/django__django/django/core/management/commands/sqlsequencereset.py b/testbed/django__django/django/core/management/commands/sqlsequencereset.py new file mode 100644 index 0000000000000000000000000000000000000000..9653fa59d0073ee9ddc0beca5198b37a46a00b17 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/sqlsequencereset.py @@ -0,0 +1,31 @@ +from django.core.management.base import AppCommand +from django.db import DEFAULT_DB_ALIAS, connections + + +class Command(AppCommand): + help = ( + "Prints the SQL statements for resetting sequences for the given app name(s)." + ) + + output_transaction = True + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + 'Nominates a database to print the SQL for. Defaults to the "default" ' + "database." 
+ ), + ) + + def handle_app_config(self, app_config, **options): + if app_config.models_module is None: + return + connection = connections[options["database"]] + models = app_config.get_models(include_auto_created=True) + statements = connection.ops.sequence_reset_sql(self.style, models) + if not statements and options["verbosity"] >= 1: + self.stderr.write("No sequences found.") + return "\n".join(statements) diff --git a/testbed/django__django/django/core/management/commands/squashmigrations.py b/testbed/django__django/django/core/management/commands/squashmigrations.py new file mode 100644 index 0000000000000000000000000000000000000000..6571cbc2e2153c9c4e240c88bf1d846096e62a20 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/squashmigrations.py @@ -0,0 +1,267 @@ +import os +import shutil + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import run_formatters +from django.db import DEFAULT_DB_ALIAS, connections, migrations +from django.db.migrations.loader import AmbiguityError, MigrationLoader +from django.db.migrations.migration import SwappableTuple +from django.db.migrations.optimizer import MigrationOptimizer +from django.db.migrations.writer import MigrationWriter +from django.utils.version import get_docs_version + + +class Command(BaseCommand): + help = ( + "Squashes an existing set of migrations (from first until specified) into a " + "single new one." + ) + + def add_arguments(self, parser): + parser.add_argument( + "app_label", + help="App label of the application to squash migrations for.", + ) + parser.add_argument( + "start_migration_name", + nargs="?", + help=( + "Migrations will be squashed starting from and including this " + "migration." + ), + ) + parser.add_argument( + "migration_name", + help="Migrations will be squashed until and including this migration.", + ) + parser.add_argument( + "--no-optimize", + action="store_true", + help="Do not try to optimize the squashed operations.", + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--squashed-name", + help="Sets the name of the new squashed migration.", + ) + parser.add_argument( + "--no-header", + action="store_false", + dest="include_header", + help="Do not add a header comment to the new squashed migration.", + ) + + def handle(self, **options): + self.verbosity = options["verbosity"] + self.interactive = options["interactive"] + app_label = options["app_label"] + start_migration_name = options["start_migration_name"] + migration_name = options["migration_name"] + no_optimize = options["no_optimize"] + squashed_name = options["squashed_name"] + include_header = options["include_header"] + # Validate app_label. + try: + apps.get_app_config(app_label) + except LookupError as err: + raise CommandError(str(err)) + # Load the current graph state, check the app and migration they asked + # for exists. 
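+        # (A hedged usage sketch, with illustrative app/migration names:
+        #   python manage.py squashmigrations myapp 0004
+        #   python manage.py squashmigrations myapp 0002 0004
+        # squashes everything up to 0004, or only 0002 through 0004.)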
+ loader = MigrationLoader(connections[DEFAULT_DB_ALIAS]) + if app_label not in loader.migrated_apps: + raise CommandError( + "App '%s' does not have migrations (so squashmigrations on " + "it makes no sense)" % app_label + ) + + migration = self.find_migration(loader, app_label, migration_name) + + # Work out the list of predecessor migrations + migrations_to_squash = [ + loader.get_migration(al, mn) + for al, mn in loader.graph.forwards_plan( + (migration.app_label, migration.name) + ) + if al == migration.app_label + ] + + if start_migration_name: + start_migration = self.find_migration( + loader, app_label, start_migration_name + ) + start = loader.get_migration( + start_migration.app_label, start_migration.name + ) + try: + start_index = migrations_to_squash.index(start) + migrations_to_squash = migrations_to_squash[start_index:] + except ValueError: + raise CommandError( + "The migration '%s' cannot be found. Maybe it comes after " + "the migration '%s'?\n" + "Have a look at:\n" + " python manage.py showmigrations %s\n" + "to debug this issue." % (start_migration, migration, app_label) + ) + + # Tell them what we're doing and optionally ask if we should proceed + if self.verbosity > 0 or self.interactive: + self.stdout.write( + self.style.MIGRATE_HEADING("Will squash the following migrations:") + ) + for migration in migrations_to_squash: + self.stdout.write(" - %s" % migration.name) + + if self.interactive: + answer = None + while not answer or answer not in "yn": + answer = input("Do you wish to proceed? [yN] ") + if not answer: + answer = "n" + break + else: + answer = answer[0].lower() + if answer != "y": + return + + # Load the operations from all those migrations and concat together, + # along with collecting external dependencies and detecting + # double-squashing + operations = [] + dependencies = set() + # We need to take all dependencies from the first migration in the list + # as it may be 0002 depending on 0001 + first_migration = True + for smigration in migrations_to_squash: + if smigration.replaces: + raise CommandError( + "You cannot squash squashed migrations! Please transition it to a " + "normal migration first: https://docs.djangoproject.com/en/%s/" + "topics/migrations/#squashing-migrations" % get_docs_version() + ) + operations.extend(smigration.operations) + for dependency in smigration.dependencies: + if isinstance(dependency, SwappableTuple): + if settings.AUTH_USER_MODEL == dependency.setting: + dependencies.add(("__setting__", "AUTH_USER_MODEL")) + else: + dependencies.add(dependency) + elif dependency[0] != smigration.app_label or first_migration: + dependencies.add(dependency) + first_migration = False + + if no_optimize: + if self.verbosity > 0: + self.stdout.write( + self.style.MIGRATE_HEADING("(Skipping optimization.)") + ) + new_operations = operations + else: + if self.verbosity > 0: + self.stdout.write(self.style.MIGRATE_HEADING("Optimizing...")) + + optimizer = MigrationOptimizer() + new_operations = optimizer.optimize(operations, migration.app_label) + + if self.verbosity > 0: + if len(new_operations) == len(operations): + self.stdout.write(" No optimizations possible.") + else: + self.stdout.write( + " Optimized from %s operations to %s operations." 
+ % (len(operations), len(new_operations)) + ) + + # Work out the value of replaces (any squashed ones we're re-squashing) + # need to feed their replaces into ours + replaces = [] + for migration in migrations_to_squash: + if migration.replaces: + replaces.extend(migration.replaces) + else: + replaces.append((migration.app_label, migration.name)) + + # Make a new migration with those operations + subclass = type( + "Migration", + (migrations.Migration,), + { + "dependencies": dependencies, + "operations": new_operations, + "replaces": replaces, + }, + ) + if start_migration_name: + if squashed_name: + # Use the name from --squashed-name. + prefix, _ = start_migration.name.split("_", 1) + name = "%s_%s" % (prefix, squashed_name) + else: + # Generate a name. + name = "%s_squashed_%s" % (start_migration.name, migration.name) + new_migration = subclass(name, app_label) + else: + name = "0001_%s" % (squashed_name or "squashed_%s" % migration.name) + new_migration = subclass(name, app_label) + new_migration.initial = True + + # Write out the new migration file + writer = MigrationWriter(new_migration, include_header) + if os.path.exists(writer.path): + raise CommandError( + f"Migration {new_migration.name} already exists. Use a different name." + ) + with open(writer.path, "w", encoding="utf-8") as fh: + fh.write(writer.as_string()) + run_formatters([writer.path]) + + if self.verbosity > 0: + self.stdout.write( + self.style.MIGRATE_HEADING( + "Created new squashed migration %s" % writer.path + ) + + "\n" + " You should commit this migration but leave the old ones in place;\n" + " the new migration will be used for new installs. Once you are sure\n" + " all instances of the codebase have applied the migrations you " + "squashed,\n" + " you can delete them." + ) + if writer.needs_manual_porting: + self.stdout.write( + self.style.MIGRATE_HEADING("Manual porting required") + "\n" + " Your migrations contained functions that must be manually " + "copied over,\n" + " as we could not safely copy their implementation.\n" + " See the comment at the top of the squashed migration for " + "details." + ) + if shutil.which("black"): + self.stdout.write( + self.style.WARNING( + "Squashed migration couldn't be formatted using the " + '"black" command. You can call it manually.' + ) + ) + + def find_migration(self, loader, app_label, name): + try: + return loader.get_migration_by_prefix(app_label, name) + except AmbiguityError: + raise CommandError( + "More than one migration matches '%s' in app '%s'. Please be " + "more specific." % (name, app_label) + ) + except KeyError: + raise CommandError( + "Cannot find a migration matching '%s' from app '%s'." + % (name, app_label) + ) diff --git a/testbed/django__django/django/core/management/commands/startapp.py b/testbed/django__django/django/core/management/commands/startapp.py new file mode 100644 index 0000000000000000000000000000000000000000..e85833b9a8273b2361fa07a8a0967babdd6ceba8 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/startapp.py @@ -0,0 +1,14 @@ +from django.core.management.templates import TemplateCommand + + +class Command(TemplateCommand): + help = ( + "Creates a Django app directory structure for the given app name in " + "the current directory or optionally in the given directory." + ) + missing_args_message = "You must provide an application name." 
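+    # Hedged usage sketch (illustrative names):
+    #   python manage.py startapp polls
+    #   python manage.py startapp polls /path/to/target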
+ + def handle(self, **options): + app_name = options.pop("name") + target = options.pop("directory") + super().handle("app", app_name, target, **options) diff --git a/testbed/django__django/django/core/management/commands/startproject.py b/testbed/django__django/django/core/management/commands/startproject.py new file mode 100644 index 0000000000000000000000000000000000000000..ca17fa54cd6e90125dec1648cdc99b197c20ed58 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/startproject.py @@ -0,0 +1,21 @@ +from django.core.checks.security.base import SECRET_KEY_INSECURE_PREFIX +from django.core.management.templates import TemplateCommand + +from ..utils import get_random_secret_key + + +class Command(TemplateCommand): + help = ( + "Creates a Django project directory structure for the given project " + "name in the current directory or optionally in the given directory." + ) + missing_args_message = "You must provide a project name." + + def handle(self, **options): + project_name = options.pop("name") + target = options.pop("directory") + + # Create a random SECRET_KEY to put it in the main settings. + options["secret_key"] = SECRET_KEY_INSECURE_PREFIX + get_random_secret_key() + + super().handle("project", project_name, target, **options) diff --git a/testbed/django__django/django/core/management/commands/test.py b/testbed/django__django/django/core/management/commands/test.py new file mode 100644 index 0000000000000000000000000000000000000000..2df6dbbecf9ccb1c708bd89e22d1a325dd524d07 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/test.py @@ -0,0 +1,71 @@ +import sys + +from django.conf import settings +from django.core.management.base import BaseCommand +from django.core.management.utils import get_command_line_option +from django.test.runner import get_max_test_processes +from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner + + +class Command(BaseCommand): + help = "Discover and run tests in the specified modules or the current directory." + + # DiscoverRunner runs the checks after databases are set up. + requires_system_checks = [] + test_runner = None + + def run_from_argv(self, argv): + """ + Pre-parse the command line to extract the value of the --testrunner + option. This allows a test runner to define additional command line + arguments. 
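+        For example, the default DiscoverRunner contributes options such as
+        --parallel and --keepdb via its add_arguments() hook.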
+ """ + self.test_runner = get_command_line_option(argv, "--testrunner") + super().run_from_argv(argv) + + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="test_label", + nargs="*", + help=( + "Module paths to test; can be modulename, modulename.TestCase or " + "modulename.TestCase.test_method" + ), + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--failfast", + action="store_true", + help="Tells Django to stop running the test suite after first failed test.", + ) + parser.add_argument( + "--testrunner", + help="Tells Django to use specified test runner class instead of " + "the one specified by the TEST_RUNNER setting.", + ) + + test_runner_class = get_runner(settings, self.test_runner) + + if hasattr(test_runner_class, "add_arguments"): + test_runner_class.add_arguments(parser) + + def handle(self, *test_labels, **options): + TestRunner = get_runner(settings, options["testrunner"]) + + time_keeper = TimeKeeper() if options.get("timing", False) else NullTimeKeeper() + parallel = options.get("parallel") + if parallel == "auto": + options["parallel"] = get_max_test_processes() + test_runner = TestRunner(**options) + with time_keeper.timed("Total run"): + failures = test_runner.run_tests(test_labels) + time_keeper.print_results() + if failures: + sys.exit(1) diff --git a/testbed/django__django/django/core/management/commands/testserver.py b/testbed/django__django/django/core/management/commands/testserver.py new file mode 100644 index 0000000000000000000000000000000000000000..caff6c65cdc49cfaf2c6ea77b395f5542d0fc7a5 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/testserver.py @@ -0,0 +1,65 @@ +from django.core.management import call_command +from django.core.management.base import BaseCommand +from django.db import connection + + +class Command(BaseCommand): + help = "Runs a development server with data from the given fixture(s)." + + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument( + "args", + metavar="fixture", + nargs="*", + help="Path(s) to fixtures to load before running the server.", + ) + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Tells Django to NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--addrport", + default="", + help="Port number or ipaddr:port to run the server on.", + ) + parser.add_argument( + "--ipv6", + "-6", + action="store_true", + dest="use_ipv6", + help="Tells Django to use an IPv6 address.", + ) + + def handle(self, *fixture_labels, **options): + verbosity = options["verbosity"] + interactive = options["interactive"] + + # Create a test database. + db_name = connection.creation.create_test_db( + verbosity=verbosity, autoclobber=not interactive, serialize=False + ) + + # Import the fixture data into the test database. + call_command("loaddata", *fixture_labels, **{"verbosity": verbosity}) + + # Run the development server. Turn off auto-reloading because it causes + # a strange error -- it causes this handle() method to be called + # multiple times. + shutdown_message = ( + "\nServer stopped.\nNote that the test database, %r, has not been " + "deleted. You can explore it on your own." 
% db_name + ) + use_threading = connection.features.test_db_allows_multiple_connections + call_command( + "runserver", + addrport=options["addrport"], + shutdown_message=shutdown_message, + use_reloader=False, + use_ipv6=options["use_ipv6"], + use_threading=use_threading, + ) diff --git a/testbed/django__django/django/core/management/sql.py b/testbed/django__django/django/core/management/sql.py new file mode 100644 index 0000000000000000000000000000000000000000..2375cc23abf30b5805a6ff45e022a67824e0881b --- /dev/null +++ b/testbed/django__django/django/core/management/sql.py @@ -0,0 +1,59 @@ +import sys + +from django.apps import apps +from django.db import models + + +def sql_flush(style, connection, reset_sequences=True, allow_cascade=False): + """ + Return a list of the SQL statements used to flush the database. + """ + tables = connection.introspection.django_table_names( + only_existing=True, include_views=False + ) + return connection.ops.sql_flush( + style, + tables, + reset_sequences=reset_sequences, + allow_cascade=allow_cascade, + ) + + +def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs): + # Emit the pre_migrate signal for every application. + for app_config in apps.get_app_configs(): + if app_config.models_module is None: + continue + if verbosity >= 2: + stdout = kwargs.get("stdout", sys.stdout) + stdout.write( + "Running pre-migrate handlers for application %s" % app_config.label + ) + models.signals.pre_migrate.send( + sender=app_config, + app_config=app_config, + verbosity=verbosity, + interactive=interactive, + using=db, + **kwargs, + ) + + +def emit_post_migrate_signal(verbosity, interactive, db, **kwargs): + # Emit the post_migrate signal for every application. + for app_config in apps.get_app_configs(): + if app_config.models_module is None: + continue + if verbosity >= 2: + stdout = kwargs.get("stdout", sys.stdout) + stdout.write( + "Running post-migrate handlers for application %s" % app_config.label + ) + models.signals.post_migrate.send( + sender=app_config, + app_config=app_config, + verbosity=verbosity, + interactive=interactive, + using=db, + **kwargs, + ) diff --git a/testbed/django__django/django/core/management/templates.py b/testbed/django__django/django/core/management/templates.py new file mode 100644 index 0000000000000000000000000000000000000000..c9cdc255661f1ef09895dc77daf00740f7e52ef3 --- /dev/null +++ b/testbed/django__django/django/core/management/templates.py @@ -0,0 +1,406 @@ +import argparse +import mimetypes +import os +import posixpath +import shutil +import stat +import tempfile +from importlib import import_module +from urllib.request import build_opener + +import django +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from django.core.management.utils import ( + find_formatters, + handle_extensions, + run_formatters, +) +from django.template import Context, Engine +from django.utils import archive +from django.utils.http import parse_header_parameters +from django.utils.version import get_docs_version + + +class TemplateCommand(BaseCommand): + """ + Copy either a Django application layout template or a Django project + layout template into the specified directory. + + :param style: A color style object (see django.core.management.color). + :param app_or_project: The string 'app' or 'project'. + :param name: The name of the application or project. + :param directory: The directory to which the template should be copied. 
+ :param options: The additional variables passed to project or app templates + """ + + requires_system_checks = [] + # The supported URL schemes + url_schemes = ["http", "https", "ftp"] + # Rewrite the following suffixes when determining the target filename. + rewrite_template_suffixes = ( + # Allow shipping invalid .py files without byte-compilation. + (".py-tpl", ".py"), + ) + + def add_arguments(self, parser): + parser.add_argument("name", help="Name of the application or project.") + parser.add_argument( + "directory", nargs="?", help="Optional destination directory" + ) + parser.add_argument( + "--template", help="The path or URL to load the template from." + ) + parser.add_argument( + "--extension", + "-e", + dest="extensions", + action="append", + default=["py"], + help='The file extension(s) to render (default: "py"). ' + "Separate multiple extensions with commas, or use " + "-e multiple times.", + ) + parser.add_argument( + "--name", + "-n", + dest="files", + action="append", + default=[], + help="The file name(s) to render. Separate multiple file names " + "with commas, or use -n multiple times.", + ) + parser.add_argument( + "--exclude", + "-x", + action="append", + default=argparse.SUPPRESS, + nargs="?", + const="", + help=( + "The directory name(s) to exclude, in addition to .git and " + "__pycache__. Can be used multiple times." + ), + ) + + def handle(self, app_or_project, name, target=None, **options): + self.app_or_project = app_or_project + self.a_or_an = "an" if app_or_project == "app" else "a" + self.paths_to_remove = [] + self.verbosity = options["verbosity"] + + self.validate_name(name) + + # if some directory is given, make sure it's nicely expanded + if target is None: + top_dir = os.path.join(os.getcwd(), name) + try: + os.makedirs(top_dir) + except FileExistsError: + raise CommandError("'%s' already exists" % top_dir) + except OSError as e: + raise CommandError(e) + else: + top_dir = os.path.abspath(os.path.expanduser(target)) + if app_or_project == "app": + self.validate_name(os.path.basename(top_dir), "directory") + if not os.path.exists(top_dir): + raise CommandError( + "Destination directory '%s' does not " + "exist, please create it first." % top_dir + ) + + # Find formatters, which are external executables, before input + # from the templates can sneak into the path. 
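+        # (find_formatters() in django.core.management.utils simply resolves
+        # shutil.which("black"); see its definition later in this patch.)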
+ formatter_paths = find_formatters() + + extensions = tuple(handle_extensions(options["extensions"])) + extra_files = [] + excluded_directories = [".git", "__pycache__"] + for file in options["files"]: + extra_files.extend(map(lambda x: x.strip(), file.split(","))) + if exclude := options.get("exclude"): + for directory in exclude: + excluded_directories.append(directory.strip()) + if self.verbosity >= 2: + self.stdout.write( + "Rendering %s template files with extensions: %s" + % (app_or_project, ", ".join(extensions)) + ) + self.stdout.write( + "Rendering %s template files with filenames: %s" + % (app_or_project, ", ".join(extra_files)) + ) + base_name = "%s_name" % app_or_project + base_subdir = "%s_template" % app_or_project + base_directory = "%s_directory" % app_or_project + camel_case_name = "camel_case_%s_name" % app_or_project + camel_case_value = "".join(x for x in name.title() if x != "_") + + context = Context( + { + **options, + base_name: name, + base_directory: top_dir, + camel_case_name: camel_case_value, + "docs_version": get_docs_version(), + "django_version": django.__version__, + }, + autoescape=False, + ) + + # Setup a stub settings environment for template rendering + if not settings.configured: + settings.configure() + django.setup() + + template_dir = self.handle_template(options["template"], base_subdir) + prefix_length = len(template_dir) + 1 + + for root, dirs, files in os.walk(template_dir): + path_rest = root[prefix_length:] + relative_dir = path_rest.replace(base_name, name) + if relative_dir: + target_dir = os.path.join(top_dir, relative_dir) + os.makedirs(target_dir, exist_ok=True) + + for dirname in dirs[:]: + if "exclude" not in options: + if dirname.startswith(".") or dirname == "__pycache__": + dirs.remove(dirname) + elif dirname in excluded_directories: + dirs.remove(dirname) + + for filename in files: + if filename.endswith((".pyo", ".pyc", ".py.class")): + # Ignore some files as they cause various breakages. + continue + old_path = os.path.join(root, filename) + new_path = os.path.join( + top_dir, relative_dir, filename.replace(base_name, name) + ) + for old_suffix, new_suffix in self.rewrite_template_suffixes: + if new_path.endswith(old_suffix): + new_path = new_path.removesuffix(old_suffix) + new_suffix + break # Only rewrite once + + if os.path.exists(new_path): + raise CommandError( + "%s already exists. Overlaying %s %s into an existing " + "directory won't replace conflicting files." + % ( + new_path, + self.a_or_an, + app_or_project, + ) + ) + + # Only render the Python files, as we don't want to + # accidentally render Django templates files + if new_path.endswith(extensions) or filename in extra_files: + with open(old_path, encoding="utf-8") as template_file: + content = template_file.read() + template = Engine().from_string(content) + content = template.render(context) + with open(new_path, "w", encoding="utf-8") as new_file: + new_file.write(content) + else: + shutil.copyfile(old_path, new_path) + + if self.verbosity >= 2: + self.stdout.write("Creating %s" % new_path) + try: + self.apply_umask(old_path, new_path) + self.make_writeable(new_path) + except OSError: + self.stderr.write( + "Notice: Couldn't set permission bits on %s. You're " + "probably using an uncommon filesystem setup. No " + "problem." 
% new_path, + self.style.NOTICE, + ) + + if self.paths_to_remove: + if self.verbosity >= 2: + self.stdout.write("Cleaning up temporary files.") + for path_to_remove in self.paths_to_remove: + if os.path.isfile(path_to_remove): + os.remove(path_to_remove) + else: + shutil.rmtree(path_to_remove) + + run_formatters([top_dir], **formatter_paths) + + def handle_template(self, template, subdir): + """ + Determine where the app or project templates are. + Use django.__path__[0] as the default because the Django install + directory isn't known. + """ + if template is None: + return os.path.join(django.__path__[0], "conf", subdir) + else: + template = template.removeprefix("file://") + expanded_template = os.path.expanduser(template) + expanded_template = os.path.normpath(expanded_template) + if os.path.isdir(expanded_template): + return expanded_template + if self.is_url(template): + # downloads the file and returns the path + absolute_path = self.download(template) + else: + absolute_path = os.path.abspath(expanded_template) + if os.path.exists(absolute_path): + return self.extract(absolute_path) + + raise CommandError( + "couldn't handle %s template %s." % (self.app_or_project, template) + ) + + def validate_name(self, name, name_or_dir="name"): + if name is None: + raise CommandError( + "you must provide {an} {app} name".format( + an=self.a_or_an, + app=self.app_or_project, + ) + ) + # Check it's a valid directory name. + if not name.isidentifier(): + raise CommandError( + "'{name}' is not a valid {app} {type}. Please make sure the " + "{type} is a valid identifier.".format( + name=name, + app=self.app_or_project, + type=name_or_dir, + ) + ) + # Check it cannot be imported. + try: + import_module(name) + except ImportError: + pass + else: + raise CommandError( + "'{name}' conflicts with the name of an existing Python " + "module and cannot be used as {an} {app} {type}. Please try " + "another {type}.".format( + name=name, + an=self.a_or_an, + app=self.app_or_project, + type=name_or_dir, + ) + ) + + def download(self, url): + """ + Download the given URL and return the file name. 
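+        For example, a --template value such as
+        https://example.com/app_template.tar.gz (an illustrative URL) is
+        saved into a temporary directory and its local path is returned.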
+ """ + + def cleanup_url(url): + tmp = url.rstrip("/") + filename = tmp.split("/")[-1] + if url.endswith("/"): + display_url = tmp + "/" + else: + display_url = url + return filename, display_url + + prefix = "django_%s_template_" % self.app_or_project + tempdir = tempfile.mkdtemp(prefix=prefix, suffix="_download") + self.paths_to_remove.append(tempdir) + filename, display_url = cleanup_url(url) + + if self.verbosity >= 2: + self.stdout.write("Downloading %s" % display_url) + + the_path = os.path.join(tempdir, filename) + opener = build_opener() + opener.addheaders = [("User-Agent", f"Django/{django.__version__}")] + try: + with opener.open(url) as source, open(the_path, "wb") as target: + headers = source.info() + target.write(source.read()) + except OSError as e: + raise CommandError( + "couldn't download URL %s to %s: %s" % (url, filename, e) + ) + + used_name = the_path.split("/")[-1] + + # Trying to get better name from response headers + content_disposition = headers["content-disposition"] + if content_disposition: + _, params = parse_header_parameters(content_disposition) + guessed_filename = params.get("filename") or used_name + else: + guessed_filename = used_name + + # Falling back to content type guessing + ext = self.splitext(guessed_filename)[1] + content_type = headers["content-type"] + if not ext and content_type: + ext = mimetypes.guess_extension(content_type) + if ext: + guessed_filename += ext + + # Move the temporary file to a filename that has better + # chances of being recognized by the archive utils + if used_name != guessed_filename: + guessed_path = os.path.join(tempdir, guessed_filename) + shutil.move(the_path, guessed_path) + return guessed_path + + # Giving up + return the_path + + def splitext(self, the_path): + """ + Like os.path.splitext, but takes off .tar, too + """ + base, ext = posixpath.splitext(the_path) + if base.lower().endswith(".tar"): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + def extract(self, filename): + """ + Extract the given file to a temporary directory and return + the path of the directory with the extracted content. + """ + prefix = "django_%s_template_" % self.app_or_project + tempdir = tempfile.mkdtemp(prefix=prefix, suffix="_extract") + self.paths_to_remove.append(tempdir) + if self.verbosity >= 2: + self.stdout.write("Extracting %s" % filename) + try: + archive.extract(filename, tempdir) + return tempdir + except (archive.ArchiveException, OSError) as e: + raise CommandError( + "couldn't extract file %s to %s: %s" % (filename, tempdir, e) + ) + + def is_url(self, template): + """Return True if the name looks like a URL.""" + if ":" not in template: + return False + scheme = template.split(":", 1)[0].lower() + return scheme in self.url_schemes + + def apply_umask(self, old_path, new_path): + current_umask = os.umask(0) + os.umask(current_umask) + current_mode = stat.S_IMODE(os.stat(old_path).st_mode) + os.chmod(new_path, current_mode & ~current_umask) + + def make_writeable(self, filename): + """ + Make sure that the file is writeable. + Useful if our source is read-only. 
+ """ + if not os.access(filename, os.W_OK): + st = os.stat(filename) + new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR + os.chmod(filename, new_permissions) diff --git a/testbed/django__django/django/core/management/utils.py b/testbed/django__django/django/core/management/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fca61f2c2347084a95d3705aa70e83224af6c029 --- /dev/null +++ b/testbed/django__django/django/core/management/utils.py @@ -0,0 +1,175 @@ +import fnmatch +import os +import shutil +import subprocess +from pathlib import Path +from subprocess import run + +from django.apps import apps as installed_apps +from django.utils.crypto import get_random_string +from django.utils.encoding import DEFAULT_LOCALE_ENCODING + +from .base import CommandError, CommandParser + + +def popen_wrapper(args, stdout_encoding="utf-8"): + """ + Friendly wrapper around Popen. + + Return stdout output, stderr output, and OS status code. + """ + try: + p = run(args, capture_output=True, close_fds=os.name != "nt") + except OSError as err: + raise CommandError("Error executing %s" % args[0]) from err + return ( + p.stdout.decode(stdout_encoding), + p.stderr.decode(DEFAULT_LOCALE_ENCODING, errors="replace"), + p.returncode, + ) + + +def handle_extensions(extensions): + """ + Organize multiple extensions that are separated with commas or passed by + using --extension/-e multiple times. + + For example: running 'django-admin makemessages -e js,txt -e xhtml -a' + would result in an extension list: ['.js', '.txt', '.xhtml'] + + >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py']) + {'.html', '.js', '.py'} + >>> handle_extensions(['.html, txt,.tpl']) + {'.html', '.tpl', '.txt'} + """ + ext_list = [] + for ext in extensions: + ext_list.extend(ext.replace(" ", "").split(",")) + for i, ext in enumerate(ext_list): + if not ext.startswith("."): + ext_list[i] = ".%s" % ext_list[i] + return set(ext_list) + + +def find_command(cmd, path=None, pathext=None): + if path is None: + path = os.environ.get("PATH", "").split(os.pathsep) + if isinstance(path, str): + path = [path] + # check if there are funny path extensions for executables, e.g. Windows + if pathext is None: + pathext = os.environ.get("PATHEXT", ".COM;.EXE;.BAT;.CMD").split(os.pathsep) + # don't use extensions if the command ends with one of them + for ext in pathext: + if cmd.endswith(ext): + pathext = [""] + break + # check if we find the command on PATH + for p in path: + f = os.path.join(p, cmd) + if os.path.isfile(f): + return f + for ext in pathext: + fext = f + ext + if os.path.isfile(fext): + return fext + return None + + +def get_random_secret_key(): + """ + Return a 50 character random string usable as a SECRET_KEY setting value. + """ + chars = "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)" + return get_random_string(50, chars) + + +def parse_apps_and_model_labels(labels): + """ + Parse a list of "app_label.ModelName" or "app_label" strings into actual + objects and return a two-element tuple: + (set of model classes, set of app_configs). + Raise a CommandError if some specified models or apps don't exist. + """ + apps = set() + models = set() + + for label in labels: + if "." 
in label: + try: + model = installed_apps.get_model(label) + except LookupError: + raise CommandError("Unknown model: %s" % label) + models.add(model) + else: + try: + app_config = installed_apps.get_app_config(label) + except LookupError as e: + raise CommandError(str(e)) + apps.add(app_config) + + return models, apps + + +def get_command_line_option(argv, option): + """ + Return the value of a command line option (which should include leading + dashes, e.g. '--testrunner') from an argument list. Return None if the + option wasn't passed or if the argument list couldn't be parsed. + """ + parser = CommandParser(add_help=False, allow_abbrev=False) + parser.add_argument(option, dest="value") + try: + options, _ = parser.parse_known_args(argv[2:]) + except CommandError: + return None + else: + return options.value + + +def normalize_path_patterns(patterns): + """Normalize an iterable of glob style patterns based on OS.""" + patterns = [os.path.normcase(p) for p in patterns] + dir_suffixes = {"%s*" % path_sep for path_sep in {"/", os.sep}} + norm_patterns = [] + for pattern in patterns: + for dir_suffix in dir_suffixes: + if pattern.endswith(dir_suffix): + norm_patterns.append(pattern.removesuffix(dir_suffix)) + break + else: + norm_patterns.append(pattern) + return norm_patterns + + +def is_ignored_path(path, ignore_patterns): + """ + Check if the given path should be ignored or not based on matching + one of the glob style `ignore_patterns`. + """ + path = Path(path) + + def ignore(pattern): + return fnmatch.fnmatchcase(path.name, pattern) or fnmatch.fnmatchcase( + str(path), pattern + ) + + return any(ignore(pattern) for pattern in normalize_path_patterns(ignore_patterns)) + + +def find_formatters(): + return {"black_path": shutil.which("black")} + + +def run_formatters(written_files, black_path=(sentinel := object())): + """ + Run the black formatter on the specified files. + """ + # Use a sentinel rather than None, as which() returns None when not found. + if black_path is sentinel: + black_path = shutil.which("black") + if black_path: + subprocess.run( + [black_path, "--fast", "--", *written_files], + capture_output=True, + ) diff --git a/testbed/django__django/django/core/paginator.py b/testbed/django__django/django/core/paginator.py new file mode 100644 index 0000000000000000000000000000000000000000..7b3189cc8b28c1ceea1e31fa0627d35e73732ef3 --- /dev/null +++ b/testbed/django__django/django/core/paginator.py @@ -0,0 +1,238 @@ +import collections.abc +import inspect +import warnings +from math import ceil + +from django.utils.functional import cached_property +from django.utils.inspect import method_has_no_args +from django.utils.translation import gettext_lazy as _ + + +class UnorderedObjectListWarning(RuntimeWarning): + pass + + +class InvalidPage(Exception): + pass + + +class PageNotAnInteger(InvalidPage): + pass + + +class EmptyPage(InvalidPage): + pass + + +class Paginator: + # Translators: String used to replace omitted page numbers in elided page + # range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10]. 
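+    # For example (hedged, with the default on_each_side=3, on_ends=2):
+    #   list(Paginator(range(500), 10).get_elided_page_range(43))
+    #   -> [1, 2, "…", 40, 41, 42, 43, 44, 45, 46, "…", 49, 50]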
+ ELLIPSIS = _("…") + default_error_messages = { + "invalid_page": _("That page number is not an integer"), + "min_page": _("That page number is less than 1"), + "no_results": _("That page contains no results"), + } + + def __init__( + self, + object_list, + per_page, + orphans=0, + allow_empty_first_page=True, + error_messages=None, + ): + self.object_list = object_list + self._check_object_list_is_ordered() + self.per_page = int(per_page) + self.orphans = int(orphans) + self.allow_empty_first_page = allow_empty_first_page + self.error_messages = ( + self.default_error_messages + if error_messages is None + else self.default_error_messages | error_messages + ) + + def __iter__(self): + for page_number in self.page_range: + yield self.page(page_number) + + def validate_number(self, number): + """Validate the given 1-based page number.""" + try: + if isinstance(number, float) and not number.is_integer(): + raise ValueError + number = int(number) + except (TypeError, ValueError): + raise PageNotAnInteger(self.error_messages["invalid_page"]) + if number < 1: + raise EmptyPage(self.error_messages["min_page"]) + if number > self.num_pages: + raise EmptyPage(self.error_messages["no_results"]) + return number + + def get_page(self, number): + """ + Return a valid page, even if the page argument isn't a number or isn't + in range. + """ + try: + number = self.validate_number(number) + except PageNotAnInteger: + number = 1 + except EmptyPage: + number = self.num_pages + return self.page(number) + + def page(self, number): + """Return a Page object for the given 1-based page number.""" + number = self.validate_number(number) + bottom = (number - 1) * self.per_page + top = bottom + self.per_page + if top + self.orphans >= self.count: + top = self.count + return self._get_page(self.object_list[bottom:top], number, self) + + def _get_page(self, *args, **kwargs): + """ + Return an instance of a single page. + + This hook can be used by subclasses to use an alternative to the + standard :cls:`Page` object. + """ + return Page(*args, **kwargs) + + @cached_property + def count(self): + """Return the total number of objects, across all pages.""" + c = getattr(self.object_list, "count", None) + if callable(c) and not inspect.isbuiltin(c) and method_has_no_args(c): + return c() + return len(self.object_list) + + @cached_property + def num_pages(self): + """Return the total number of pages.""" + if self.count == 0 and not self.allow_empty_first_page: + return 0 + hits = max(1, self.count - self.orphans) + return ceil(hits / self.per_page) + + @property + def page_range(self): + """ + Return a 1-based range of pages for iterating through within + a template for loop. + """ + return range(1, self.num_pages + 1) + + def _check_object_list_is_ordered(self): + """ + Warn if self.object_list is unordered (typically a QuerySet). + """ + ordered = getattr(self.object_list, "ordered", None) + if ordered is not None and not ordered: + obj_list_repr = ( + "{} {}".format( + self.object_list.model, self.object_list.__class__.__name__ + ) + if hasattr(self.object_list, "model") + else "{!r}".format(self.object_list) + ) + warnings.warn( + "Pagination may yield inconsistent results with an unordered " + "object_list: {}.".format(obj_list_repr), + UnorderedObjectListWarning, + stacklevel=3, + ) + + def get_elided_page_range(self, number=1, *, on_each_side=3, on_ends=2): + """ + Return a 1-based range of pages with some values elided. 
+
+        If the page range is larger than a given size, the whole range is not
+        provided and a compact form is returned instead, e.g. for a paginator
+        with 50 pages, if page 43 were the current page, the output, with the
+        default arguments, would be:
+
+        1, 2, …, 40, 41, 42, 43, 44, 45, 46, …, 49, 50.
+        """
+        number = self.validate_number(number)
+
+        if self.num_pages <= (on_each_side + on_ends) * 2:
+            yield from self.page_range
+            return
+
+        if number > (1 + on_each_side + on_ends) + 1:
+            yield from range(1, on_ends + 1)
+            yield self.ELLIPSIS
+            yield from range(number - on_each_side, number + 1)
+        else:
+            yield from range(1, number + 1)
+
+        if number < (self.num_pages - on_each_side - on_ends) - 1:
+            yield from range(number + 1, number + on_each_side + 1)
+            yield self.ELLIPSIS
+            yield from range(self.num_pages - on_ends + 1, self.num_pages + 1)
+        else:
+            yield from range(number + 1, self.num_pages + 1)
+
+
+class Page(collections.abc.Sequence):
+    def __init__(self, object_list, number, paginator):
+        self.object_list = object_list
+        self.number = number
+        self.paginator = paginator
+
+    def __repr__(self):
+        return "<Page %s of %s>" % (self.number, self.paginator.num_pages)
+
+    def __len__(self):
+        return len(self.object_list)
+
+    def __getitem__(self, index):
+        if not isinstance(index, (int, slice)):
+            raise TypeError(
+                "Page indices must be integers or slices, not %s."
+                % type(index).__name__
+            )
+        # The object_list is converted to a list so that if it was a QuerySet
+        # it won't be a database hit per __getitem__.
+        if not isinstance(self.object_list, list):
+            self.object_list = list(self.object_list)
+        return self.object_list[index]
+
+    def has_next(self):
+        return self.number < self.paginator.num_pages
+
+    def has_previous(self):
+        return self.number > 1
+
+    def has_other_pages(self):
+        return self.has_previous() or self.has_next()
+
+    def next_page_number(self):
+        return self.paginator.validate_number(self.number + 1)
+
+    def previous_page_number(self):
+        return self.paginator.validate_number(self.number - 1)
+
+    def start_index(self):
+        """
+        Return the 1-based index of the first object on this page,
+        relative to total objects in the paginator.
+        """
+        # Special case, return zero if no items.
+        if self.paginator.count == 0:
+            return 0
+        return (self.paginator.per_page * (self.number - 1)) + 1
+
+    def end_index(self):
+        """
+        Return the 1-based index of the last object on this page,
+        relative to total objects found (hits).
+        """
+        # Special case for the last page because there can be orphans.
+        if self.number == self.paginator.num_pages:
+            return self.paginator.count
+        return self.number * self.paginator.per_page
diff --git a/testbed/django__django/django/core/serializers/__init__.py b/testbed/django__django/django/core/serializers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..480c54b79b64167fd62ed8580956b4c87bbaac44
--- /dev/null
+++ b/testbed/django__django/django/core/serializers/__init__.py
@@ -0,0 +1,254 @@
+"""
+Interfaces for serializing Django objects.
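+
+Each registered format maps to a module that provides a ``Serializer`` class
+and a ``Deserializer`` callable; both are looked up by format name.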
+ +Usage:: + + from django.core import serializers + json = serializers.serialize("json", some_queryset) + objects = list(serializers.deserialize("json", json)) + +To add your own serializers, use the SERIALIZATION_MODULES setting:: + + SERIALIZATION_MODULES = { + "csv": "path.to.csv.serializer", + "txt": "path.to.txt.serializer", + } + +""" + +import importlib + +from django.apps import apps +from django.conf import settings +from django.core.serializers.base import SerializerDoesNotExist + +# Built-in serializers +BUILTIN_SERIALIZERS = { + "xml": "django.core.serializers.xml_serializer", + "python": "django.core.serializers.python", + "json": "django.core.serializers.json", + "yaml": "django.core.serializers.pyyaml", + "jsonl": "django.core.serializers.jsonl", +} + +_serializers = {} + + +class BadSerializer: + """ + Stub serializer to hold exception raised during registration + + This allows the serializer registration to cache serializers and if there + is an error raised in the process of creating a serializer it will be + raised and passed along to the caller when the serializer is used. + """ + + internal_use_only = False + + def __init__(self, exception): + self.exception = exception + + def __call__(self, *args, **kwargs): + raise self.exception + + +def register_serializer(format, serializer_module, serializers=None): + """Register a new serializer. + + ``serializer_module`` should be the fully qualified module name + for the serializer. + + If ``serializers`` is provided, the registration will be added + to the provided dictionary. + + If ``serializers`` is not provided, the registration will be made + directly into the global register of serializers. Adding serializers + directly is not a thread-safe operation. + """ + if serializers is None and not _serializers: + _load_serializers() + + try: + module = importlib.import_module(serializer_module) + except ImportError as exc: + bad_serializer = BadSerializer(exc) + + module = type( + "BadSerializerModule", + (), + { + "Deserializer": bad_serializer, + "Serializer": bad_serializer, + }, + ) + + if serializers is None: + _serializers[format] = module + else: + serializers[format] = module + + +def unregister_serializer(format): + "Unregister a given serializer. This is not a thread-safe operation." + if not _serializers: + _load_serializers() + if format not in _serializers: + raise SerializerDoesNotExist(format) + del _serializers[format] + + +def get_serializer(format): + if not _serializers: + _load_serializers() + if format not in _serializers: + raise SerializerDoesNotExist(format) + return _serializers[format].Serializer + + +def get_serializer_formats(): + if not _serializers: + _load_serializers() + return list(_serializers) + + +def get_public_serializer_formats(): + if not _serializers: + _load_serializers() + return [k for k, v in _serializers.items() if not v.Serializer.internal_use_only] + + +def get_deserializer(format): + if not _serializers: + _load_serializers() + if format not in _serializers: + raise SerializerDoesNotExist(format) + return _serializers[format].Deserializer + + +def serialize(format, queryset, **options): + """ + Serialize a queryset (or any iterator that returns database objects) using + a certain serializer. + """ + s = get_serializer(format)() + s.serialize(queryset, **options) + return s.getvalue() + + +def deserialize(format, stream_or_string, **options): + """ + Deserialize a stream or a string. 
Return an iterator that yields ``(obj,
+    m2m_relation_dict)``, where ``obj`` is an instantiated -- but *unsaved* --
+    object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
+    list_of_related_objects}``.
+    """
+    d = get_deserializer(format)
+    return d(stream_or_string, **options)
+
+
+def _load_serializers():
+    """
+    Register built-in and settings-defined serializers. This is done lazily so
+    that user code has a chance to (e.g.) set up custom settings without
+    needing to be careful of import order.
+    """
+    global _serializers
+    serializers = {}
+    for format in BUILTIN_SERIALIZERS:
+        register_serializer(format, BUILTIN_SERIALIZERS[format], serializers)
+    if hasattr(settings, "SERIALIZATION_MODULES"):
+        for format in settings.SERIALIZATION_MODULES:
+            register_serializer(
+                format, settings.SERIALIZATION_MODULES[format], serializers
+            )
+    _serializers = serializers
+
+
+def sort_dependencies(app_list, allow_cycles=False):
+    """Sort a list of (app_config, models) pairs into a single list of models.
+
+    The single list of models is sorted so that any model with a natural key
+    is serialized before a normal model, and any model with a natural key
+    dependency has its dependencies serialized first.
+
+    If allow_cycles is True, return the best-effort ordering that will respect
+    most of the dependencies but ignore some of them to break the cycles.
+    """
+    # Process the list of models, and get the list of dependencies
+    model_dependencies = []
+    models = set()
+    for app_config, model_list in app_list:
+        if model_list is None:
+            model_list = app_config.get_models()
+
+        for model in model_list:
+            models.add(model)
+            # Add any explicitly defined dependencies
+            if hasattr(model, "natural_key"):
+                deps = getattr(model.natural_key, "dependencies", [])
+                if deps:
+                    deps = [apps.get_model(dep) for dep in deps]
+            else:
+                deps = []
+
+            # Now add a dependency for any FK relation with a model that
+            # defines a natural key
+            for field in model._meta.fields:
+                if field.remote_field:
+                    rel_model = field.remote_field.model
+                    if hasattr(rel_model, "natural_key") and rel_model != model:
+                        deps.append(rel_model)
+            # Also add a dependency for any simple M2M relation with a model
+            # that defines a natural key. M2M relations with explicit through
+            # models don't count as dependencies.
+            for field in model._meta.many_to_many:
+                if field.remote_field.through._meta.auto_created:
+                    rel_model = field.remote_field.model
+                    if hasattr(rel_model, "natural_key") and rel_model != model:
+                        deps.append(rel_model)
+            model_dependencies.append((model, deps))
+
+    model_dependencies.reverse()
+    # Now sort the models to ensure that dependencies are met. This
+    # is done by repeatedly iterating over the input list of models.
+    # If all the dependencies of a given model are in the final list,
+    # that model is promoted to the end of the final list. This process
+    # continues until the input list is empty, or we do a full iteration
+    # over the input models without promoting a model to the final list.
+    # If we do a full iteration without a promotion, that means there are
+    # circular dependencies in the list.
+    model_list = []
+    while model_dependencies:
+        skipped = []
+        changed = False
+        while model_dependencies:
+            model, deps = model_dependencies.pop()
+
+            # If all of the models in the dependency list are either already
+            # on the final model list, or not on the original serialization list,
+            # then we've found another model with all its dependencies satisfied.
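+            # Hypothetical example: if a Book model declares a natural-key
+            # dependency on Author, Author must already be in model_list (or
+            # be outside this serialization run) before Book is promoted.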
+ if all(d not in models or d in model_list for d in deps): + model_list.append(model) + changed = True + else: + skipped.append((model, deps)) + if not changed: + if allow_cycles: + # If cycles are allowed, add the last skipped model and ignore + # its dependencies. This could be improved by some graph + # analysis to ignore as few dependencies as possible. + model, _ = skipped.pop() + model_list.append(model) + else: + raise RuntimeError( + "Can't resolve dependencies for %s in serialized app list." + % ", ".join( + model._meta.label + for model, deps in sorted( + skipped, key=lambda obj: obj[0].__name__ + ) + ), + ) + model_dependencies = skipped + + return model_list diff --git a/testbed/django__django/django/core/serializers/base.py b/testbed/django__django/django/core/serializers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..20dffac05f7bae042563e13ba80473abfaad6dd3 --- /dev/null +++ b/testbed/django__django/django/core/serializers/base.py @@ -0,0 +1,386 @@ +""" +Module for abstract serializer/unserializer base classes. +""" +from io import StringIO + +from django.core.exceptions import ObjectDoesNotExist +from django.db import models + +DEFER_FIELD = object() + + +class SerializerDoesNotExist(KeyError): + """The requested serializer was not found.""" + + pass + + +class SerializationError(Exception): + """Something bad happened during serialization.""" + + pass + + +class DeserializationError(Exception): + """Something bad happened during deserialization.""" + + @classmethod + def WithData(cls, original_exc, model, fk, field_value): + """ + Factory method for creating a deserialization error which has a more + explanatory message. + """ + return cls( + "%s: (%s:pk=%s) field_value was '%s'" + % (original_exc, model, fk, field_value) + ) + + +class M2MDeserializationError(Exception): + """Something bad happened during deserialization of a ManyToManyField.""" + + def __init__(self, original_exc, pk): + self.original_exc = original_exc + self.pk = pk + + +class ProgressBar: + progress_width = 75 + + def __init__(self, output, total_count): + self.output = output + self.total_count = total_count + self.prev_done = 0 + + def update(self, count): + if not self.output: + return + perc = count * 100 // self.total_count + done = perc * self.progress_width // 100 + if self.prev_done >= done: + return + self.prev_done = done + cr = "" if self.total_count == 1 else "\r" + self.output.write( + cr + "[" + "." * done + " " * (self.progress_width - done) + "]" + ) + if done == self.progress_width: + self.output.write("\n") + self.output.flush() + + +class Serializer: + """ + Abstract serializer base class. + """ + + # Indicates if the implemented serializer is only available for + # internal Django use. + internal_use_only = False + progress_class = ProgressBar + stream_class = StringIO + + def serialize( + self, + queryset, + *, + stream=None, + fields=None, + use_natural_foreign_keys=False, + use_natural_primary_keys=False, + progress_output=None, + object_count=0, + **options, + ): + """ + Serialize a queryset. 
+ """ + self.options = options + + self.stream = stream if stream is not None else self.stream_class() + self.selected_fields = fields + self.use_natural_foreign_keys = use_natural_foreign_keys + self.use_natural_primary_keys = use_natural_primary_keys + progress_bar = self.progress_class(progress_output, object_count) + + self.start_serialization() + self.first = True + for count, obj in enumerate(queryset, start=1): + self.start_object(obj) + # Use the concrete parent class' _meta instead of the object's _meta + # This is to avoid local_fields problems for proxy models. Refs #17717. + concrete_model = obj._meta.concrete_model + # When using natural primary keys, retrieve the pk field of the + # parent for multi-table inheritance child models. That field must + # be serialized, otherwise deserialization isn't possible. + if self.use_natural_primary_keys: + pk = concrete_model._meta.pk + pk_parent = ( + pk if pk.remote_field and pk.remote_field.parent_link else None + ) + else: + pk_parent = None + for field in concrete_model._meta.local_fields: + if field.serialize or field is pk_parent: + if field.remote_field is None: + if ( + self.selected_fields is None + or field.attname in self.selected_fields + ): + self.handle_field(obj, field) + else: + if ( + self.selected_fields is None + or field.attname[:-3] in self.selected_fields + ): + self.handle_fk_field(obj, field) + for field in concrete_model._meta.local_many_to_many: + if field.serialize: + if ( + self.selected_fields is None + or field.attname in self.selected_fields + ): + self.handle_m2m_field(obj, field) + self.end_object(obj) + progress_bar.update(count) + self.first = self.first and False + self.end_serialization() + return self.getvalue() + + def start_serialization(self): + """ + Called when serializing of the queryset starts. + """ + raise NotImplementedError( + "subclasses of Serializer must provide a start_serialization() method" + ) + + def end_serialization(self): + """ + Called when serializing of the queryset ends. + """ + pass + + def start_object(self, obj): + """ + Called when serializing of an object starts. + """ + raise NotImplementedError( + "subclasses of Serializer must provide a start_object() method" + ) + + def end_object(self, obj): + """ + Called when serializing of an object ends. + """ + pass + + def handle_field(self, obj, field): + """ + Called to handle each individual (non-relational) field on an object. + """ + raise NotImplementedError( + "subclasses of Serializer must provide a handle_field() method" + ) + + def handle_fk_field(self, obj, field): + """ + Called to handle a ForeignKey field. + """ + raise NotImplementedError( + "subclasses of Serializer must provide a handle_fk_field() method" + ) + + def handle_m2m_field(self, obj, field): + """ + Called to handle a ManyToManyField. + """ + raise NotImplementedError( + "subclasses of Serializer must provide a handle_m2m_field() method" + ) + + def getvalue(self): + """ + Return the fully serialized queryset (or None if the output stream is + not seekable). + """ + if callable(getattr(self.stream, "getvalue", None)): + return self.stream.getvalue() + + +class Deserializer: + """ + Abstract base deserializer class. 
+ """ + + def __init__(self, stream_or_string, **options): + """ + Init this serializer given a stream or a string + """ + self.options = options + if isinstance(stream_or_string, str): + self.stream = StringIO(stream_or_string) + else: + self.stream = stream_or_string + + def __iter__(self): + return self + + def __next__(self): + """Iteration interface -- return the next item in the stream""" + raise NotImplementedError( + "subclasses of Deserializer must provide a __next__() method" + ) + + +class DeserializedObject: + """ + A deserialized model. + + Basically a container for holding the pre-saved deserialized data along + with the many-to-many data saved with the object. + + Call ``save()`` to save the object (with the many-to-many data) to the + database; call ``save(save_m2m=False)`` to save just the object fields + (and not touch the many-to-many stuff.) + """ + + def __init__(self, obj, m2m_data=None, deferred_fields=None): + self.object = obj + self.m2m_data = m2m_data + self.deferred_fields = deferred_fields + + def __repr__(self): + return "<%s: %s(pk=%s)>" % ( + self.__class__.__name__, + self.object._meta.label, + self.object.pk, + ) + + def save(self, save_m2m=True, using=None, **kwargs): + # Call save on the Model baseclass directly. This bypasses any + # model-defined save. The save is also forced to be raw. + # raw=True is passed to any pre/post_save signals. + models.Model.save_base(self.object, using=using, raw=True, **kwargs) + if self.m2m_data and save_m2m: + for accessor_name, object_list in self.m2m_data.items(): + getattr(self.object, accessor_name).set(object_list) + + # prevent a second (possibly accidental) call to save() from saving + # the m2m data twice. + self.m2m_data = None + + def save_deferred_fields(self, using=None): + self.m2m_data = {} + for field, field_value in self.deferred_fields.items(): + opts = self.object._meta + label = opts.app_label + "." + opts.model_name + if isinstance(field.remote_field, models.ManyToManyRel): + try: + values = deserialize_m2m_values( + field, field_value, using, handle_forward_references=False + ) + except M2MDeserializationError as e: + raise DeserializationError.WithData( + e.original_exc, label, self.object.pk, e.pk + ) + self.m2m_data[field.name] = values + elif isinstance(field.remote_field, models.ManyToOneRel): + try: + value = deserialize_fk_value( + field, field_value, using, handle_forward_references=False + ) + except Exception as e: + raise DeserializationError.WithData( + e, label, self.object.pk, field_value + ) + setattr(self.object, field.attname, value) + self.save() + + +def build_instance(Model, data, db): + """ + Build a model instance. + + If the model instance doesn't have a primary key and the model supports + natural keys, try to retrieve it from the database. 
+ """ + default_manager = Model._meta.default_manager + pk = data.get(Model._meta.pk.attname) + if ( + pk is None + and hasattr(default_manager, "get_by_natural_key") + and hasattr(Model, "natural_key") + ): + obj = Model(**data) + obj._state.db = db + natural_key = obj.natural_key() + try: + data[Model._meta.pk.attname] = Model._meta.pk.to_python( + default_manager.db_manager(db).get_by_natural_key(*natural_key).pk + ) + except Model.DoesNotExist: + pass + return Model(**data) + + +def deserialize_m2m_values(field, field_value, using, handle_forward_references): + model = field.remote_field.model + if hasattr(model._default_manager, "get_by_natural_key"): + + def m2m_convert(value): + if hasattr(value, "__iter__") and not isinstance(value, str): + return ( + model._default_manager.db_manager(using) + .get_by_natural_key(*value) + .pk + ) + else: + return model._meta.pk.to_python(value) + + else: + + def m2m_convert(v): + return model._meta.pk.to_python(v) + + try: + pks_iter = iter(field_value) + except TypeError as e: + raise M2MDeserializationError(e, field_value) + try: + values = [] + for pk in pks_iter: + values.append(m2m_convert(pk)) + return values + except Exception as e: + if isinstance(e, ObjectDoesNotExist) and handle_forward_references: + return DEFER_FIELD + else: + raise M2MDeserializationError(e, pk) + + +def deserialize_fk_value(field, field_value, using, handle_forward_references): + if field_value is None: + return None + model = field.remote_field.model + default_manager = model._default_manager + field_name = field.remote_field.field_name + if ( + hasattr(default_manager, "get_by_natural_key") + and hasattr(field_value, "__iter__") + and not isinstance(field_value, str) + ): + try: + obj = default_manager.db_manager(using).get_by_natural_key(*field_value) + except ObjectDoesNotExist: + if handle_forward_references: + return DEFER_FIELD + else: + raise + value = getattr(obj, field_name) + # If this is a natural foreign key to an object that has a FK/O2O as + # the foreign key, use the FK value. 
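+        # (e.g. a multi-table inheritance child whose primary key is a
+        # OneToOneField onto its parent model)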
+ if model._meta.pk.remote_field: + value = value.pk + return value + return model._meta.get_field(field_name).to_python(field_value) diff --git a/testbed/django__django/django/core/serializers/json.py b/testbed/django__django/django/core/serializers/json.py new file mode 100644 index 0000000000000000000000000000000000000000..afac821465a1c7245566b0c5f71e96bc915fef61 --- /dev/null +++ b/testbed/django__django/django/core/serializers/json.py @@ -0,0 +1,106 @@ +""" +Serialize data to/from JSON +""" + +import datetime +import decimal +import json +import uuid + +from django.core.serializers.base import DeserializationError +from django.core.serializers.python import Deserializer as PythonDeserializer +from django.core.serializers.python import Serializer as PythonSerializer +from django.utils.duration import duration_iso_string +from django.utils.functional import Promise +from django.utils.timezone import is_aware + + +class Serializer(PythonSerializer): + """Convert a queryset to JSON.""" + + internal_use_only = False + + def _init_options(self): + self._current = None + self.json_kwargs = self.options.copy() + self.json_kwargs.pop("stream", None) + self.json_kwargs.pop("fields", None) + if self.options.get("indent"): + # Prevent trailing spaces + self.json_kwargs["separators"] = (",", ": ") + self.json_kwargs.setdefault("cls", DjangoJSONEncoder) + self.json_kwargs.setdefault("ensure_ascii", False) + + def start_serialization(self): + self._init_options() + self.stream.write("[") + + def end_serialization(self): + if self.options.get("indent"): + self.stream.write("\n") + self.stream.write("]") + if self.options.get("indent"): + self.stream.write("\n") + + def end_object(self, obj): + # self._current has the field data + indent = self.options.get("indent") + if not self.first: + self.stream.write(",") + if not indent: + self.stream.write(" ") + if indent: + self.stream.write("\n") + json.dump(self.get_dump_object(obj), self.stream, **self.json_kwargs) + self._current = None + + def getvalue(self): + # Grandparent super + return super(PythonSerializer, self).getvalue() + + +def Deserializer(stream_or_string, **options): + """Deserialize a stream or string of JSON data.""" + if not isinstance(stream_or_string, (bytes, str)): + stream_or_string = stream_or_string.read() + if isinstance(stream_or_string, bytes): + stream_or_string = stream_or_string.decode() + try: + objects = json.loads(stream_or_string) + yield from PythonDeserializer(objects, **options) + except (GeneratorExit, DeserializationError): + raise + except Exception as exc: + raise DeserializationError() from exc + + +class DjangoJSONEncoder(json.JSONEncoder): + """ + JSONEncoder subclass that knows how to encode date/time, decimal types, and + UUIDs. + """ + + def default(self, o): + # See "Date Time String Format" in the ECMA-262 specification. 
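+        # An ECMA-262 datetime looks like 2023-05-01T17:30:00.123Z --
+        # millisecond precision, with "Z" replacing a "+00:00" UTC offset --
+        # which is what the truncation below produces.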
+ if isinstance(o, datetime.datetime): + r = o.isoformat() + if o.microsecond: + r = r[:23] + r[26:] + if r.endswith("+00:00"): + r = r.removesuffix("+00:00") + "Z" + return r + elif isinstance(o, datetime.date): + return o.isoformat() + elif isinstance(o, datetime.time): + if is_aware(o): + raise ValueError("JSON can't represent timezone-aware times.") + r = o.isoformat() + if o.microsecond: + r = r[:12] + return r + elif isinstance(o, datetime.timedelta): + return duration_iso_string(o) + elif isinstance(o, (decimal.Decimal, uuid.UUID, Promise)): + return str(o) + else: + return super().default(o) diff --git a/testbed/django__django/django/core/serializers/jsonl.py b/testbed/django__django/django/core/serializers/jsonl.py new file mode 100644 index 0000000000000000000000000000000000000000..c264c2ccafe884b093a3d66c6411f59455a01681 --- /dev/null +++ b/testbed/django__django/django/core/serializers/jsonl.py @@ -0,0 +1,57 @@ +""" +Serialize data to/from JSON Lines +""" + +import json + +from django.core.serializers.base import DeserializationError +from django.core.serializers.json import DjangoJSONEncoder +from django.core.serializers.python import Deserializer as PythonDeserializer +from django.core.serializers.python import Serializer as PythonSerializer + + +class Serializer(PythonSerializer): + """Convert a queryset to JSON Lines.""" + + internal_use_only = False + + def _init_options(self): + self._current = None + self.json_kwargs = self.options.copy() + self.json_kwargs.pop("stream", None) + self.json_kwargs.pop("fields", None) + self.json_kwargs.pop("indent", None) + self.json_kwargs["separators"] = (",", ": ") + self.json_kwargs.setdefault("cls", DjangoJSONEncoder) + self.json_kwargs.setdefault("ensure_ascii", False) + + def start_serialization(self): + self._init_options() + + def end_object(self, obj): + # self._current has the field data + json.dump(self.get_dump_object(obj), self.stream, **self.json_kwargs) + self.stream.write("\n") + self._current = None + + def getvalue(self): + # Grandparent super + return super(PythonSerializer, self).getvalue() + + +def Deserializer(stream_or_string, **options): + """Deserialize a stream or string of JSON data.""" + if isinstance(stream_or_string, bytes): + stream_or_string = stream_or_string.decode() + if isinstance(stream_or_string, (bytes, str)): + stream_or_string = stream_or_string.split("\n") + + for line in stream_or_string: + if not line.strip(): + continue + try: + yield from PythonDeserializer([json.loads(line)], **options) + except (GeneratorExit, DeserializationError): + raise + except Exception as exc: + raise DeserializationError() from exc diff --git a/testbed/django__django/django/core/serializers/python.py b/testbed/django__django/django/core/serializers/python.py new file mode 100644 index 0000000000000000000000000000000000000000..0dc504aa34e116c7be85d089806eeb1288827cb3 --- /dev/null +++ b/testbed/django__django/django/core/serializers/python.py @@ -0,0 +1,192 @@ +""" +A Python "serializer". Doesn't do much serializing per se -- just converts to +and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for +other serializers. +""" + +from django.apps import apps +from django.core.serializers import base +from django.db import DEFAULT_DB_ALIAS, models +from django.utils.encoding import is_protected_type + + +class Serializer(base.Serializer): + """ + Serialize a QuerySet to basic Python objects. 
+    """
+
+    internal_use_only = True
+
+    def start_serialization(self):
+        self._current = None
+        self.objects = []
+
+    def end_serialization(self):
+        pass
+
+    def start_object(self, obj):
+        self._current = {}
+
+    def end_object(self, obj):
+        self.objects.append(self.get_dump_object(obj))
+        self._current = None
+
+    def get_dump_object(self, obj):
+        data = {"model": str(obj._meta)}
+        if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"):
+            data["pk"] = self._value_from_field(obj, obj._meta.pk)
+        data["fields"] = self._current
+        return data
+
+    def _value_from_field(self, obj, field):
+        value = field.value_from_object(obj)
+        # Protected types (i.e., primitives like None, numbers, dates,
+        # and Decimals) are passed through as is. All other values are
+        # converted to string first.
+        return value if is_protected_type(value) else field.value_to_string(obj)
+
+    def handle_field(self, obj, field):
+        self._current[field.name] = self._value_from_field(obj, field)
+
+    def handle_fk_field(self, obj, field):
+        if self.use_natural_foreign_keys and hasattr(
+            field.remote_field.model, "natural_key"
+        ):
+            related = getattr(obj, field.name)
+            if related:
+                value = related.natural_key()
+            else:
+                value = None
+        else:
+            value = self._value_from_field(obj, field)
+        self._current[field.name] = value
+
+    def handle_m2m_field(self, obj, field):
+        if field.remote_field.through._meta.auto_created:
+            if self.use_natural_foreign_keys and hasattr(
+                field.remote_field.model, "natural_key"
+            ):
+
+                def m2m_value(value):
+                    return value.natural_key()
+
+                def queryset_iterator(obj, field):
+                    return getattr(obj, field.name).iterator()
+
+            else:
+
+                def m2m_value(value):
+                    return self._value_from_field(value, value._meta.pk)
+
+                def queryset_iterator(obj, field):
+                    return (
+                        getattr(obj, field.name).select_related().only("pk").iterator()
+                    )
+
+            m2m_iter = getattr(obj, "_prefetched_objects_cache", {}).get(
+                field.name,
+                queryset_iterator(obj, field),
+            )
+            self._current[field.name] = [m2m_value(related) for related in m2m_iter]
+
+    def getvalue(self):
+        return self.objects
+
+
+def Deserializer(
+    object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options
+):
+    """
+    Deserialize simple Python objects back into Django ORM instances.
+
+    It's expected that you pass the Python objects themselves (instead of a
+    stream or a string) to the constructor.
+    """
+    handle_forward_references = options.pop("handle_forward_references", False)
+    field_names_cache = {}  # Model: <list of field_names>
+
+    for d in object_list:
+        # Look up the model and start building a dict of data for it.
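+        # Each item is expected to be of the form
+        # {"model": "app_label.modelname", "pk": ..., "fields": {...}}.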
+ try: + Model = _get_model(d["model"]) + except base.DeserializationError: + if ignorenonexistent: + continue + else: + raise + data = {} + if "pk" in d: + try: + data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get("pk")) + except Exception as e: + raise base.DeserializationError.WithData( + e, d["model"], d.get("pk"), None + ) + m2m_data = {} + deferred_fields = {} + + if Model not in field_names_cache: + field_names_cache[Model] = {f.name for f in Model._meta.get_fields()} + field_names = field_names_cache[Model] + + # Handle each field + for field_name, field_value in d["fields"].items(): + if ignorenonexistent and field_name not in field_names: + # skip fields no longer on model + continue + + field = Model._meta.get_field(field_name) + + # Handle M2M relations + if field.remote_field and isinstance( + field.remote_field, models.ManyToManyRel + ): + try: + values = base.deserialize_m2m_values( + field, field_value, using, handle_forward_references + ) + except base.M2MDeserializationError as e: + raise base.DeserializationError.WithData( + e.original_exc, d["model"], d.get("pk"), e.pk + ) + if values == base.DEFER_FIELD: + deferred_fields[field] = field_value + else: + m2m_data[field.name] = values + # Handle FK fields + elif field.remote_field and isinstance( + field.remote_field, models.ManyToOneRel + ): + try: + value = base.deserialize_fk_value( + field, field_value, using, handle_forward_references + ) + except Exception as e: + raise base.DeserializationError.WithData( + e, d["model"], d.get("pk"), field_value + ) + if value == base.DEFER_FIELD: + deferred_fields[field] = field_value + else: + data[field.attname] = value + # Handle all other fields + else: + try: + data[field.name] = field.to_python(field_value) + except Exception as e: + raise base.DeserializationError.WithData( + e, d["model"], d.get("pk"), field_value + ) + + obj = base.build_instance(Model, data, using) + yield base.DeserializedObject(obj, m2m_data, deferred_fields) + + +def _get_model(model_identifier): + """Look up a model from an "app_label.model_name" string.""" + try: + return apps.get_model(model_identifier) + except (LookupError, TypeError): + raise base.DeserializationError( + "Invalid model identifier: '%s'" % model_identifier + ) diff --git a/testbed/django__django/django/core/serializers/pyyaml.py b/testbed/django__django/django/core/serializers/pyyaml.py new file mode 100644 index 0000000000000000000000000000000000000000..9a20b6658f923825f2a339b75c90c58ab60f3b40 --- /dev/null +++ b/testbed/django__django/django/core/serializers/pyyaml.py @@ -0,0 +1,82 @@ +""" +YAML serializer. + +Requires PyYaml (https://pyyaml.org/), but that's checked for in __init__. 
+""" + +import collections +import decimal +from io import StringIO + +import yaml + +from django.core.serializers.base import DeserializationError +from django.core.serializers.python import Deserializer as PythonDeserializer +from django.core.serializers.python import Serializer as PythonSerializer +from django.db import models + +# Use the C (faster) implementation if possible +try: + from yaml import CSafeDumper as SafeDumper + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeDumper, SafeLoader + + +class DjangoSafeDumper(SafeDumper): + def represent_decimal(self, data): + return self.represent_scalar("tag:yaml.org,2002:str", str(data)) + + def represent_ordered_dict(self, data): + return self.represent_mapping("tag:yaml.org,2002:map", data.items()) + + +DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal) +DjangoSafeDumper.add_representer( + collections.OrderedDict, DjangoSafeDumper.represent_ordered_dict +) +# Workaround to represent dictionaries in insertion order. +# See https://github.com/yaml/pyyaml/pull/143. +DjangoSafeDumper.add_representer(dict, DjangoSafeDumper.represent_ordered_dict) + + +class Serializer(PythonSerializer): + """Convert a queryset to YAML.""" + + internal_use_only = False + + def handle_field(self, obj, field): + # A nasty special case: base YAML doesn't support serialization of time + # types (as opposed to dates or datetimes, which it does support). Since + # we want to use the "safe" serializer for better interoperability, we + # need to do something with those pesky times. Converting 'em to strings + # isn't perfect, but it's better than a "!!python/time" type which would + # halt deserialization under any other language. + if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None: + self._current[field.name] = str(getattr(obj, field.name)) + else: + super().handle_field(obj, field) + + def end_serialization(self): + self.options.setdefault("allow_unicode", True) + yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options) + + def getvalue(self): + # Grandparent super + return super(PythonSerializer, self).getvalue() + + +def Deserializer(stream_or_string, **options): + """Deserialize a stream or string of YAML data.""" + if isinstance(stream_or_string, bytes): + stream_or_string = stream_or_string.decode() + if isinstance(stream_or_string, str): + stream = StringIO(stream_or_string) + else: + stream = stream_or_string + try: + yield from PythonDeserializer(yaml.load(stream, Loader=SafeLoader), **options) + except (GeneratorExit, DeserializationError): + raise + except Exception as exc: + raise DeserializationError() from exc diff --git a/testbed/django__django/django/core/serializers/xml_serializer.py b/testbed/django__django/django/core/serializers/xml_serializer.py new file mode 100644 index 0000000000000000000000000000000000000000..e0129a59bb8077498d88bb108a930cf976b03b82 --- /dev/null +++ b/testbed/django__django/django/core/serializers/xml_serializer.py @@ -0,0 +1,496 @@ +""" +XML serializer. 
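+
+Objects are written as <object> elements with <field> subelements; natural
+keys are emitted as nested <natural> elements.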
+""" +import json +from xml.dom import pulldom +from xml.sax import handler +from xml.sax.expatreader import ExpatParser as _ExpatParser + +from django.apps import apps +from django.conf import settings +from django.core.exceptions import ObjectDoesNotExist +from django.core.serializers import base +from django.db import DEFAULT_DB_ALIAS, models +from django.utils.xmlutils import SimplerXMLGenerator, UnserializableContentError + + +class Serializer(base.Serializer): + """Serialize a QuerySet to XML.""" + + def indent(self, level): + if self.options.get("indent") is not None: + self.xml.ignorableWhitespace( + "\n" + " " * self.options.get("indent") * level + ) + + def start_serialization(self): + """ + Start serialization -- open the XML document and the root element. + """ + self.xml = SimplerXMLGenerator( + self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET) + ) + self.xml.startDocument() + self.xml.startElement("django-objects", {"version": "1.0"}) + + def end_serialization(self): + """ + End serialization -- end the document. + """ + self.indent(0) + self.xml.endElement("django-objects") + self.xml.endDocument() + + def start_object(self, obj): + """ + Called as each object is handled. + """ + if not hasattr(obj, "_meta"): + raise base.SerializationError( + "Non-model object (%s) encountered during serialization" % type(obj) + ) + + self.indent(1) + attrs = {"model": str(obj._meta)} + if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"): + obj_pk = obj.pk + if obj_pk is not None: + attrs["pk"] = str(obj_pk) + + self.xml.startElement("object", attrs) + + def end_object(self, obj): + """ + Called after handling all fields for an object. + """ + self.indent(1) + self.xml.endElement("object") + + def handle_field(self, obj, field): + """ + Handle each field on an object (except for ForeignKeys and + ManyToManyFields). + """ + self.indent(2) + self.xml.startElement( + "field", + { + "name": field.name, + "type": field.get_internal_type(), + }, + ) + + # Get a "string version" of the object's data. + if getattr(obj, field.name) is not None: + value = field.value_to_string(obj) + if field.get_internal_type() == "JSONField": + # Dump value since JSONField.value_to_string() doesn't output + # strings. + value = json.dumps(value, cls=field.encoder) + try: + self.xml.characters(value) + except UnserializableContentError: + raise ValueError( + "%s.%s (pk:%s) contains unserializable characters" + % (obj.__class__.__name__, field.name, obj.pk) + ) + else: + self.xml.addQuickElement("None") + + self.xml.endElement("field") + + def handle_fk_field(self, obj, field): + """ + Handle a ForeignKey (they need to be treated slightly + differently from regular fields). + """ + self._start_relational_field(field) + related_att = getattr(obj, field.get_attname()) + if related_att is not None: + if self.use_natural_foreign_keys and hasattr( + field.remote_field.model, "natural_key" + ): + related = getattr(obj, field.name) + # If related object has a natural key, use it + related = related.natural_key() + # Iterable natural keys are rolled out as subelements + for key_value in related: + self.xml.startElement("natural", {}) + self.xml.characters(str(key_value)) + self.xml.endElement("natural") + else: + self.xml.characters(str(related_att)) + else: + self.xml.addQuickElement("None") + self.xml.endElement("field") + + def handle_m2m_field(self, obj, field): + """ + Handle a ManyToManyField. Related objects are only serialized as + references to the object's PK (i.e. 
the related *data* is not dumped,
+        just the relation).
+        """
+        if field.remote_field.through._meta.auto_created:
+            self._start_relational_field(field)
+            if self.use_natural_foreign_keys and hasattr(
+                field.remote_field.model, "natural_key"
+            ):
+                # If the objects in the m2m have a natural key, use it
+                def handle_m2m(value):
+                    natural = value.natural_key()
+                    # Iterable natural keys are rolled out as subelements
+                    self.xml.startElement("object", {})
+                    for key_value in natural:
+                        self.xml.startElement("natural", {})
+                        self.xml.characters(str(key_value))
+                        self.xml.endElement("natural")
+                    self.xml.endElement("object")
+
+                def queryset_iterator(obj, field):
+                    return getattr(obj, field.name).iterator()
+
+            else:
+
+                def handle_m2m(value):
+                    self.xml.addQuickElement("object", attrs={"pk": str(value.pk)})
+
+                def queryset_iterator(obj, field):
+                    return (
+                        getattr(obj, field.name).select_related().only("pk").iterator()
+                    )
+
+            m2m_iter = getattr(obj, "_prefetched_objects_cache", {}).get(
+                field.name,
+                queryset_iterator(obj, field),
+            )
+            for relobj in m2m_iter:
+                handle_m2m(relobj)
+
+            self.xml.endElement("field")
+
+    def _start_relational_field(self, field):
+        """Output the <field> element for relational fields."""
+        self.indent(2)
+        self.xml.startElement(
+            "field",
+            {
+                "name": field.name,
+                "rel": field.remote_field.__class__.__name__,
+                "to": str(field.remote_field.model._meta),
+            },
+        )
+
+
+class Deserializer(base.Deserializer):
+    """Deserialize XML."""
+
+    def __init__(
+        self,
+        stream_or_string,
+        *,
+        using=DEFAULT_DB_ALIAS,
+        ignorenonexistent=False,
+        **options,
+    ):
+        super().__init__(stream_or_string, **options)
+        self.handle_forward_references = options.pop("handle_forward_references", False)
+        self.event_stream = pulldom.parse(self.stream, self._make_parser())
+        self.db = using
+        self.ignore = ignorenonexistent
+
+    def _make_parser(self):
+        """Create a hardened XML parser (no custom/external entities)."""
+        return DefusedExpatParser()
+
+    def __next__(self):
+        for event, node in self.event_stream:
+            if event == "START_ELEMENT" and node.nodeName == "object":
+                self.event_stream.expandNode(node)
+                return self._handle_object(node)
+        raise StopIteration
+
+    def _handle_object(self, node):
+        """Convert an <object> node to a DeserializedObject."""
+        # Look up the model using the model loading mechanism. If this fails,
+        # bail.
+        Model = self._get_model_from_node(node, "model")
+
+        # Start building a data dictionary from the object.
+        data = {}
+        if node.hasAttribute("pk"):
+            data[Model._meta.pk.attname] = Model._meta.pk.to_python(
+                node.getAttribute("pk")
+            )
+
+        # Also start building a dict of m2m data (this is saved as
+        # {m2m_accessor_attribute : [list_of_related_objects]})
+        m2m_data = {}
+        deferred_fields = {}
+
+        field_names = {f.name for f in Model._meta.get_fields()}
+        # Deserialize each field.
+        for field_node in node.getElementsByTagName("field"):
+            # If the field is missing the name attribute, bail (are you
+            # sensing a pattern here?)
+            field_name = field_node.getAttribute("name")
+            if not field_name:
+                raise base.DeserializationError(
+                    "<field> node is missing the 'name' attribute"
+                )
+
+            # Get the field from the Model. This will raise a
+            # FieldDoesNotExist if, well, the field doesn't exist, which will
+            # be propagated correctly unless ignorenonexistent=True is used.
+            if self.ignore and field_name not in field_names:
+                continue
+            field = Model._meta.get_field(field_name)
+
+            # As is usually the case, relation fields get the special treatment.
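+            # M2M values arrive as child <object> nodes and FK values as
+            # inner text or <natural> subelements; see the handler methods
+            # below.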
+            if field.remote_field and isinstance(
+                field.remote_field, models.ManyToManyRel
+            ):
+                value = self._handle_m2m_field_node(field_node, field)
+                if value == base.DEFER_FIELD:
+                    deferred_fields[field] = [
+                        [
+                            getInnerText(nat_node).strip()
+                            for nat_node in obj_node.getElementsByTagName("natural")
+                        ]
+                        for obj_node in field_node.getElementsByTagName("object")
+                    ]
+                else:
+                    m2m_data[field.name] = value
+            elif field.remote_field and isinstance(
+                field.remote_field, models.ManyToOneRel
+            ):
+                value = self._handle_fk_field_node(field_node, field)
+                if value == base.DEFER_FIELD:
+                    deferred_fields[field] = [
+                        getInnerText(k).strip()
+                        for k in field_node.getElementsByTagName("natural")
+                    ]
+                else:
+                    data[field.attname] = value
+            else:
+                if field_node.getElementsByTagName("None"):
+                    value = None
+                else:
+                    value = field.to_python(getInnerText(field_node).strip())
+                    # Load value since JSONField.to_python() outputs strings.
+                    if field.get_internal_type() == "JSONField":
+                        value = json.loads(value, cls=field.decoder)
+                data[field.name] = value
+
+        obj = base.build_instance(Model, data, self.db)
+
+        # Return a DeserializedObject so that the m2m data has a place to live.
+        return base.DeserializedObject(obj, m2m_data, deferred_fields)
+
+    def _handle_fk_field_node(self, node, field):
+        """
+        Handle a <field> node for a ForeignKey.
+        """
+        # Check if there is a child node named 'None', returning None if so.
+        if node.getElementsByTagName("None"):
+            return None
+        else:
+            model = field.remote_field.model
+            if hasattr(model._default_manager, "get_by_natural_key"):
+                keys = node.getElementsByTagName("natural")
+                if keys:
+                    # If there are 'natural' subelements, it must be a natural key
+                    field_value = [getInnerText(k).strip() for k in keys]
+                    try:
+                        obj = model._default_manager.db_manager(
+                            self.db
+                        ).get_by_natural_key(*field_value)
+                    except ObjectDoesNotExist:
+                        if self.handle_forward_references:
+                            return base.DEFER_FIELD
+                        else:
+                            raise
+                    obj_pk = getattr(obj, field.remote_field.field_name)
+                    # If this is a natural foreign key to an object that
+                    # has a FK/O2O as the foreign key, use the FK value
+                    if field.remote_field.model._meta.pk.remote_field:
+                        obj_pk = obj_pk.pk
+                else:
+                    # Otherwise, treat like a normal PK
+                    field_value = getInnerText(node).strip()
+                    obj_pk = model._meta.get_field(
+                        field.remote_field.field_name
+                    ).to_python(field_value)
+                return obj_pk
+            else:
+                field_value = getInnerText(node).strip()
+                return model._meta.get_field(field.remote_field.field_name).to_python(
+                    field_value
+                )
+
+    def _handle_m2m_field_node(self, node, field):
+        """
+        Handle a <field> node for a ManyToManyField.
+        """
+        model = field.remote_field.model
+        default_manager = model._default_manager
+        if hasattr(default_manager, "get_by_natural_key"):
+
+            def m2m_convert(n):
+                keys = n.getElementsByTagName("natural")
+                if keys:
+                    # If there are 'natural' subelements, it must be a natural key
+                    field_value = [getInnerText(k).strip() for k in keys]
+                    obj_pk = (
+                        default_manager.db_manager(self.db)
+                        .get_by_natural_key(*field_value)
+                        .pk
+                    )
+                else:
+                    # Otherwise, treat like a normal PK value.
+                    obj_pk = model._meta.pk.to_python(n.getAttribute("pk"))
+                return obj_pk
+
+        else:
+
+            def m2m_convert(n):
+                return model._meta.pk.to_python(n.getAttribute("pk"))
+
+        values = []
+        try:
+            for c in node.getElementsByTagName("object"):
+                values.append(m2m_convert(c))
+        except Exception as e:
+            if isinstance(e, ObjectDoesNotExist) and self.handle_forward_references:
+                return base.DEFER_FIELD
+            else:
+                raise base.M2MDeserializationError(e, c)
+        else:
+            return values
+
+    def _get_model_from_node(self, node, attr):
+        """
+        Look up a model from a <object> or a
+        <relation> node.
+        """
+        model_identifier = node.getAttribute(attr)
+        if not model_identifier:
+            raise base.DeserializationError(
+                "<%s> node is missing the required '%s' attribute"
+                % (node.nodeName, attr)
+            )
+        try:
+            return apps.get_model(model_identifier)
+        except (LookupError, TypeError):
+            raise base.DeserializationError(
+                "<%s> node has invalid model identifier: '%s'"
+                % (node.nodeName, model_identifier)
+            )
+
+
+def getInnerText(node):
+    """Get all the inner text of a DOM node (recursively)."""
+    # inspired by https://mail.python.org/pipermail/xml-sig/2005-March/011022.html
+    inner_text = []
+    for child in node.childNodes:
+        if (
+            child.nodeType == child.TEXT_NODE
+            or child.nodeType == child.CDATA_SECTION_NODE
+        ):
+            inner_text.append(child.data)
+        elif child.nodeType == child.ELEMENT_NODE:
+            inner_text.extend(getInnerText(child))
+        else:
+            pass
+    return "".join(inner_text)
+
+
+# Below code based on Christian Heimes' defusedxml
+
+
+class DefusedExpatParser(_ExpatParser):
+    """
+    An expat parser hardened against XML bomb attacks.
+
+    Forbid DTDs, external entity references
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.setFeature(handler.feature_external_ges, False)
+        self.setFeature(handler.feature_external_pes, False)
+
+    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        raise DTDForbidden(name, sysid, pubid)
+
+    def entity_decl(
+        self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
+    ):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
+
+    def external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+    def reset(self):
+        _ExpatParser.reset(self)
+        parser = self._parser
+        parser.StartDoctypeDeclHandler = self.start_doctype_decl
+        parser.EntityDeclHandler = self.entity_decl
+        parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
+        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
+
+
+class DefusedXmlException(ValueError):
+    """Base exception."""
+
+    def __repr__(self):
+        return str(self)
+
+
+class DTDForbidden(DefusedXmlException):
+    """Document type definition is forbidden."""
+
+    def __init__(self, name, sysid, pubid):
+        super().__init__()
+        self.name = name
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class EntitiesForbidden(DefusedXmlException):
+    """Entity definition is forbidden."""
+
+    def __init__(self, name, value, base, sysid, pubid, notation_name):
+        super().__init__()
+        self.name = name
+        self.value = value
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+        self.notation_name = notation_name
+
+    def __str__(self):
+        tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class ExternalReferenceForbidden(DefusedXmlException):
+    """Resolving an external reference is forbidden."""
+
+    def __init__(self, context, base, sysid, pubid):
+        super().__init__()
+        self.context = context
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
+        return tpl.format(self.sysid, self.pubid)
diff --git a/testbed/django__django/django/core/servers/__init__.py b/testbed/django__django/django/core/servers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/testbed/django__django/django/core/servers/basehttp.py b/testbed/django__django/django/core/servers/basehttp.py
new file mode 100644
index 0000000000000000000000000000000000000000..6afe17cec4771439e364495abb5638e2aa727bf3
--- /dev/null
+++ b/testbed/django__django/django/core/servers/basehttp.py
@@ -0,0 +1,281 @@
+"""
+HTTP server that implements the Python WSGI protocol (PEP 333, rev 1.21).
+
+Based on wsgiref.simple_server which is part of the standard library since 2.5.
+
+This is a simple server for use in testing or debugging Django apps. It hasn't
+been reviewed for security issues. DON'T USE IT FOR PRODUCTION USE!
+"""
+
+import logging
+import socket
+import socketserver
+import sys
+from collections import deque
+from wsgiref import simple_server
+
+from django.core.exceptions import ImproperlyConfigured
+from django.core.handlers.wsgi import LimitedStream
+from django.core.wsgi import get_wsgi_application
+from django.db import connections
+from django.utils.module_loading import import_string
+
+__all__ = ("WSGIServer", "WSGIRequestHandler")
+
+logger = logging.getLogger("django.server")
+
+
+def get_internal_wsgi_application():
+    """
+    Load and return the WSGI application as configured by the user in
+    ``settings.WSGI_APPLICATION``. With the default ``startproject`` layout,
+    this will be the ``application`` object in ``projectname/wsgi.py``.
+
+    This function, and the ``WSGI_APPLICATION`` setting itself, are only useful
+    for Django's internal server (runserver); external WSGI servers should just
+    be configured to point to the correct application object directly.
+
+    If settings.WSGI_APPLICATION is not set (is ``None``), return
+    whatever ``django.core.wsgi.get_wsgi_application`` returns.
+    """
+    from django.conf import settings
+
+    app_path = getattr(settings, "WSGI_APPLICATION")
+    if app_path is None:
+        return get_wsgi_application()
+
+    try:
+        return import_string(app_path)
+    except ImportError as err:
+        raise ImproperlyConfigured(
+            "WSGI application '%s' could not be loaded; "
+            "Error importing module."
% app_path + ) from err + + +def is_broken_pipe_error(): + exc_type, _, _ = sys.exc_info() + return issubclass( + exc_type, + ( + BrokenPipeError, + ConnectionAbortedError, + ConnectionResetError, + ), + ) + + +class WSGIServer(simple_server.WSGIServer): + """BaseHTTPServer that implements the Python WSGI protocol""" + + request_queue_size = 10 + + def __init__(self, *args, ipv6=False, allow_reuse_address=True, **kwargs): + if ipv6: + self.address_family = socket.AF_INET6 + self.allow_reuse_address = allow_reuse_address + super().__init__(*args, **kwargs) + + def handle_error(self, request, client_address): + if is_broken_pipe_error(): + logger.info("- Broken pipe from %s", client_address) + else: + super().handle_error(request, client_address) + + +class ThreadedWSGIServer(socketserver.ThreadingMixIn, WSGIServer): + """A threaded version of the WSGIServer""" + + daemon_threads = True + + def __init__(self, *args, connections_override=None, **kwargs): + super().__init__(*args, **kwargs) + self.connections_override = connections_override + + # socketserver.ThreadingMixIn.process_request() passes this method as + # the target to a new Thread object. + def process_request_thread(self, request, client_address): + if self.connections_override: + # Override this thread's database connections with the ones + # provided by the parent thread. + for alias, conn in self.connections_override.items(): + connections[alias] = conn + super().process_request_thread(request, client_address) + + def _close_connections(self): + # Used for mocking in tests. + connections.close_all() + + def close_request(self, request): + self._close_connections() + super().close_request(request) + + +class ServerHandler(simple_server.ServerHandler): + http_version = "1.1" + + def __init__(self, stdin, stdout, stderr, environ, **kwargs): + """ + Use a LimitedStream so that unread request data will be ignored at + the end of the request. WSGIRequest uses a LimitedStream but it + shouldn't discard the data since the upstream servers usually do this. + This fix applies only for testserver/runserver. + """ + try: + content_length = int(environ.get("CONTENT_LENGTH")) + except (ValueError, TypeError): + content_length = 0 + super().__init__( + LimitedStream(stdin, content_length), stdout, stderr, environ, **kwargs + ) + + def cleanup_headers(self): + super().cleanup_headers() + if ( + self.environ["REQUEST_METHOD"] == "HEAD" + and "Content-Length" in self.headers + ): + del self.headers["Content-Length"] + # HTTP/1.1 requires support for persistent connections. Send 'close' if + # the content length is unknown to prevent clients from reusing the + # connection. + if ( + self.environ["REQUEST_METHOD"] != "HEAD" + and "Content-Length" not in self.headers + ): + self.headers["Connection"] = "close" + # Persistent connections require threading server. + elif not isinstance(self.request_handler.server, socketserver.ThreadingMixIn): + self.headers["Connection"] = "close" + # Mark the connection for closing if it's set as such above or if the + # application sent the header. + if self.headers.get("Connection") == "close": + self.request_handler.close_connection = True + + def close(self): + self.get_stdin().read() + super().close() + + def finish_response(self): + if self.environ["REQUEST_METHOD"] == "HEAD": + try: + deque(self.result, maxlen=0) # Consume iterator. 
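+                # A response to a HEAD request must carry headers only, so
+                # the body produced by the application is drained and
+                # discarded.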
+ # Don't call self.finish_content() as, if the headers have not + # been sent and Content-Length isn't set, it'll default to "0" + # which will prevent omission of the Content-Length header with + # HEAD requests as permitted by RFC 9110 Section 9.3.2. + # Instead, send the headers, if not sent yet. + if not self.headers_sent: + self.send_headers() + finally: + self.close() + else: + super().finish_response() + + +class WSGIRequestHandler(simple_server.WSGIRequestHandler): + protocol_version = "HTTP/1.1" + + def address_string(self): + # Short-circuit parent method to not call socket.getfqdn + return self.client_address[0] + + def log_message(self, format, *args): + extra = { + "request": self.request, + "server_time": self.log_date_time_string(), + } + if args[1][0] == "4": + # 0x16 = Handshake, 0x03 = SSL 3.0 or TLS 1.x + if args[0].startswith("\x16\x03"): + extra["status_code"] = 500 + logger.error( + "You're accessing the development server over HTTPS, but " + "it only supports HTTP.", + extra=extra, + ) + return + + if args[1].isdigit() and len(args[1]) == 3: + status_code = int(args[1]) + extra["status_code"] = status_code + + if status_code >= 500: + level = logger.error + elif status_code >= 400: + level = logger.warning + else: + level = logger.info + else: + level = logger.info + + level(format, *args, extra=extra) + + def get_environ(self): + # Strip all headers with underscores in the name before constructing + # the WSGI environ. This prevents header-spoofing based on ambiguity + # between underscores and dashes both normalized to underscores in WSGI + # env vars. Nginx and Apache 2.4+ both do this as well. + for k in self.headers: + if "_" in k: + del self.headers[k] + + return super().get_environ() + + def handle(self): + self.close_connection = True + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + try: + self.connection.shutdown(socket.SHUT_WR) + except (AttributeError, OSError): + pass + + def handle_one_request(self): + """Copy of WSGIRequestHandler.handle() but with different ServerHandler""" + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = "" + self.request_version = "" + self.command = "" + self.send_error(414) + return + + if not self.parse_request(): # An error code has been sent, just exit + return + + handler = ServerHandler( + self.rfile, self.wfile, self.get_stderr(), self.get_environ() + ) + handler.request_handler = self # backpointer for logging & connection closing + handler.run(self.server.get_app()) + + +def run( + addr, + port, + wsgi_handler, + ipv6=False, + threading=False, + on_bind=None, + server_cls=WSGIServer, +): + server_address = (addr, port) + if threading: + httpd_cls = type("WSGIServer", (socketserver.ThreadingMixIn, server_cls), {}) + else: + httpd_cls = server_cls + httpd = httpd_cls(server_address, WSGIRequestHandler, ipv6=ipv6) + if on_bind is not None: + on_bind(getattr(httpd, "server_port", port)) + if threading: + # ThreadingMixIn.daemon_threads indicates how threads will behave on an + # abrupt shutdown; like quitting the server by the user or restarting + # by the auto-reloader. True means the server will not wait for thread + # termination before it quits. This will make auto-reloader faster + # and will prevent the need to kill the server manually if a thread + # isn't terminating correctly. 
+ httpd.daemon_threads = True + httpd.set_app(wsgi_handler) + httpd.serve_forever() diff --git a/testbed/django__django/django/core/signals.py b/testbed/django__django/django/core/signals.py new file mode 100644 index 0000000000000000000000000000000000000000..960f5c355b7aede7cbd1aeceac861120ca53c102 --- /dev/null +++ b/testbed/django__django/django/core/signals.py @@ -0,0 +1,6 @@ +from django.dispatch import Signal + +request_started = Signal() +request_finished = Signal() +got_request_exception = Signal() +setting_changed = Signal() diff --git a/testbed/django__django/django/core/signing.py b/testbed/django__django/django/core/signing.py new file mode 100644 index 0000000000000000000000000000000000000000..5521493490df531e2509491fb22d9f81241b2bc8 --- /dev/null +++ b/testbed/django__django/django/core/signing.py @@ -0,0 +1,302 @@ +""" +Functions for creating and restoring url-safe signed JSON objects. + +The format used looks like this: + +>>> signing.dumps("hello") +'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk' + +There are two components here, separated by a ':'. The first component is a +URLsafe base64 encoded JSON of the object passed to dumps(). The second +component is a base64 encoded hmac/SHA-256 hash of "$first_component:$secret" + +signing.loads(s) checks the signature and returns the deserialized object. +If the signature fails, a BadSignature exception is raised. + +>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk") +'hello' +>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified") +... +BadSignature: Signature "ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified" does not match + +You can optionally compress the JSON prior to base64 encoding it to save +space, using the compress=True argument. This checks if compression actually +helps and only applies compression if the result is a shorter string: + +>>> signing.dumps(list(range(1, 20)), compress=True) +'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ' + +The fact that the string is compressed is signalled by the prefixed '.' at the +start of the base64 JSON. + +There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'. +These functions make use of all of them. 
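+
+The salt argument namespaces the signature, so a value signed under one salt
+does not validate under another. For example (signature values depend on
+SECRET_KEY, so the output here is illustrative):
+
+>>> signed = signing.dumps("hello", salt="extra")
+>>> signing.loads(signed, salt="extra")
+'hello'
+>>> signing.loads(signed)
+...
+BadSignature: Signature "..." does not match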
+""" + +import base64 +import datetime +import json +import time +import warnings +import zlib + +from django.conf import settings +from django.utils.crypto import constant_time_compare, salted_hmac +from django.utils.deprecation import RemovedInDjango51Warning +from django.utils.encoding import force_bytes +from django.utils.module_loading import import_string +from django.utils.regex_helper import _lazy_re_compile + +_SEP_UNSAFE = _lazy_re_compile(r"^[A-z0-9-_=]*$") +BASE62_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + + +class BadSignature(Exception): + """Signature does not match.""" + + pass + + +class SignatureExpired(BadSignature): + """Signature timestamp is older than required max_age.""" + + pass + + +def b62_encode(s): + if s == 0: + return "0" + sign = "-" if s < 0 else "" + s = abs(s) + encoded = "" + while s > 0: + s, remainder = divmod(s, 62) + encoded = BASE62_ALPHABET[remainder] + encoded + return sign + encoded + + +def b62_decode(s): + if s == "0": + return 0 + sign = 1 + if s[0] == "-": + s = s[1:] + sign = -1 + decoded = 0 + for digit in s: + decoded = decoded * 62 + BASE62_ALPHABET.index(digit) + return sign * decoded + + +def b64_encode(s): + return base64.urlsafe_b64encode(s).strip(b"=") + + +def b64_decode(s): + pad = b"=" * (-len(s) % 4) + return base64.urlsafe_b64decode(s + pad) + + +def base64_hmac(salt, value, key, algorithm="sha1"): + return b64_encode( + salted_hmac(salt, value, key, algorithm=algorithm).digest() + ).decode() + + +def _cookie_signer_key(key): + # SECRET_KEYS items may be str or bytes. + return b"django.http.cookies" + force_bytes(key) + + +def get_cookie_signer(salt="django.core.signing.get_cookie_signer"): + Signer = import_string(settings.SIGNING_BACKEND) + return Signer( + key=_cookie_signer_key(settings.SECRET_KEY), + fallback_keys=map(_cookie_signer_key, settings.SECRET_KEY_FALLBACKS), + salt=salt, + ) + + +class JSONSerializer: + """ + Simple wrapper around json to be used in signing.dumps and + signing.loads. + """ + + def dumps(self, obj): + return json.dumps(obj, separators=(",", ":")).encode("latin-1") + + def loads(self, data): + return json.loads(data.decode("latin-1")) + + +def dumps( + obj, key=None, salt="django.core.signing", serializer=JSONSerializer, compress=False +): + """ + Return URL-safe, hmac signed base64 compressed JSON string. If key is + None, use settings.SECRET_KEY instead. The hmac algorithm is the default + Signer algorithm. + + If compress is True (not the default), check if compressing using zlib can + save some space. Prepend a '.' to signify compression. This is included + in the signature, to protect against zip bombs. + + Salt can be used to namespace the hash, so that a signed string is + only valid for a given namespace. Leaving this at the default + value or re-using a salt value across different parts of your + application without good cause is a security risk. + + The serializer is expected to return a bytestring. + """ + return TimestampSigner(key=key, salt=salt).sign_object( + obj, serializer=serializer, compress=compress + ) + + +def loads( + s, + key=None, + salt="django.core.signing", + serializer=JSONSerializer, + max_age=None, + fallback_keys=None, +): + """ + Reverse of dumps(), raise BadSignature if signature fails. + + The serializer is expected to accept a bytestring. 
+ """ + return TimestampSigner( + key=key, salt=salt, fallback_keys=fallback_keys + ).unsign_object( + s, + serializer=serializer, + max_age=max_age, + ) + + +class Signer: + # RemovedInDjango51Warning: When the deprecation ends, replace with: + # def __init__( + # self, *, key=None, sep=":", salt=None, algorithm=None, fallback_keys=None + # ): + def __init__( + self, + *args, + key=None, + sep=":", + salt=None, + algorithm=None, + fallback_keys=None, + ): + self.key = key or settings.SECRET_KEY + self.fallback_keys = ( + fallback_keys + if fallback_keys is not None + else settings.SECRET_KEY_FALLBACKS + ) + self.sep = sep + self.salt = salt or "%s.%s" % ( + self.__class__.__module__, + self.__class__.__name__, + ) + self.algorithm = algorithm or "sha256" + # RemovedInDjango51Warning. + if args: + warnings.warn( + f"Passing positional arguments to {self.__class__.__name__} is " + f"deprecated.", + RemovedInDjango51Warning, + stacklevel=2, + ) + for arg, attr in zip( + args, ["key", "sep", "salt", "algorithm", "fallback_keys"] + ): + if arg or attr == "sep": + setattr(self, attr, arg) + if _SEP_UNSAFE.match(self.sep): + raise ValueError( + "Unsafe Signer separator: %r (cannot be empty or consist of " + "only A-z0-9-_=)" % sep, + ) + + def signature(self, value, key=None): + key = key or self.key + return base64_hmac(self.salt + "signer", value, key, algorithm=self.algorithm) + + def sign(self, value): + return "%s%s%s" % (value, self.sep, self.signature(value)) + + def unsign(self, signed_value): + if self.sep not in signed_value: + raise BadSignature('No "%s" found in value' % self.sep) + value, sig = signed_value.rsplit(self.sep, 1) + for key in [self.key, *self.fallback_keys]: + if constant_time_compare(sig, self.signature(value, key)): + return value + raise BadSignature('Signature "%s" does not match' % sig) + + def sign_object(self, obj, serializer=JSONSerializer, compress=False): + """ + Return URL-safe, hmac signed base64 compressed JSON string. + + If compress is True (not the default), check if compressing using zlib + can save some space. Prepend a '.' to signify compression. This is + included in the signature, to protect against zip bombs. + + The serializer is expected to return a bytestring. + """ + data = serializer().dumps(obj) + # Flag for if it's been compressed or not. + is_compressed = False + + if compress: + # Avoid zlib dependency unless compress is being used. + compressed = zlib.compress(data) + if len(compressed) < (len(data) - 1): + data = compressed + is_compressed = True + base64d = b64_encode(data).decode() + if is_compressed: + base64d = "." + base64d + return self.sign(base64d) + + def unsign_object(self, signed_obj, serializer=JSONSerializer, **kwargs): + # Signer.unsign() returns str but base64 and zlib compression operate + # on bytes. + base64d = self.unsign(signed_obj, **kwargs).encode() + decompress = base64d[:1] == b"." + if decompress: + # It's compressed; uncompress it first. + base64d = base64d[1:] + data = b64_decode(base64d) + if decompress: + data = zlib.decompress(data) + return serializer().loads(data) + + +class TimestampSigner(Signer): + def timestamp(self): + return b62_encode(int(time.time())) + + def sign(self, value): + value = "%s%s%s" % (value, self.sep, self.timestamp()) + return super().sign(value) + + def unsign(self, value, max_age=None): + """ + Retrieve original value and check it wasn't signed more + than max_age seconds ago. 
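+
+        For example (illustrative, since the signed value depends on
+        SECRET_KEY):
+
+        >>> signer = TimestampSigner()
+        >>> signed = signer.sign("hello")
+        >>> signer.unsign(signed, max_age=10)
+        'hello'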
+        """
+        result = super().unsign(value)
+        value, timestamp = result.rsplit(self.sep, 1)
+        timestamp = b62_decode(timestamp)
+        if max_age is not None:
+            if isinstance(max_age, datetime.timedelta):
+                max_age = max_age.total_seconds()
+            # Check timestamp is not older than max_age
+            age = time.time() - timestamp
+            if age > max_age:
+                raise SignatureExpired("Signature age %s > %s seconds" % (age, max_age))
+        return value
diff --git a/testbed/django__django/django/core/validators.py b/testbed/django__django/django/core/validators.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe8d46526ab55e5b2dbcffb7392e8a3eb73d3867
--- /dev/null
+++ b/testbed/django__django/django/core/validators.py
@@ -0,0 +1,642 @@
+import ipaddress
+import math
+import re
+from pathlib import Path
+from urllib.parse import urlsplit, urlunsplit
+
+from django.core.exceptions import ValidationError
+from django.utils.deconstruct import deconstructible
+from django.utils.encoding import punycode
+from django.utils.ipv6 import is_valid_ipv6_address
+from django.utils.regex_helper import _lazy_re_compile
+from django.utils.translation import gettext_lazy as _
+from django.utils.translation import ngettext_lazy
+
+# These values, if given to validate(), will trigger the self.required check.
+EMPTY_VALUES = (None, "", [], (), {})
+
+
+@deconstructible
+class RegexValidator:
+    regex = ""
+    message = _("Enter a valid value.")
+    code = "invalid"
+    inverse_match = False
+    flags = 0
+
+    def __init__(
+        self, regex=None, message=None, code=None, inverse_match=None, flags=None
+    ):
+        if regex is not None:
+            self.regex = regex
+        if message is not None:
+            self.message = message
+        if code is not None:
+            self.code = code
+        if inverse_match is not None:
+            self.inverse_match = inverse_match
+        if flags is not None:
+            self.flags = flags
+        if self.flags and not isinstance(self.regex, str):
+            raise TypeError(
+                "If the flags are set, regex must be a regular expression string."
+            )
+
+        self.regex = _lazy_re_compile(self.regex, self.flags)
+
+    def __call__(self, value):
+        """
+        Validate that the input contains (or does *not* contain, if
+        inverse_match is True) a match for the regular expression.
+        """
+        regex_matches = self.regex.search(str(value))
+        invalid_input = regex_matches if self.inverse_match else not regex_matches
+        if invalid_input:
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, RegexValidator)
+            and self.regex.pattern == other.regex.pattern
+            and self.regex.flags == other.regex.flags
+            and (self.message == other.message)
+            and (self.code == other.code)
+            and (self.inverse_match == other.inverse_match)
+        )
+
+
+@deconstructible
+class URLValidator(RegexValidator):
+    ul = "\u00a1-\uffff"  # Unicode letters range (must not be a raw string).
+
+    # IP patterns
+    ipv4_re = (
+        r"(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)"
+        r"(?:\.(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)){3}"
+    )
+    ipv6_re = r"\[[0-9a-f:.]+\]"  # (simple regex, validated later)
+
+    # Host patterns
+    hostname_re = (
+        r"[a-z" + ul + r"0-9](?:[a-z" + ul + r"0-9-]{0,61}[a-z" + ul + r"0-9])?"
+    )
+    # Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1
+    domain_re = r"(?:\.(?!-)[a-z" + ul + r"0-9-]{1,63}(?<!-))*"
+    tld_re = (
+        r"\."  # dot
+        r"(?!-)"  # can't start with a dash
+        r"(?:[a-z" + ul + "-]{2,63}"  # domain label
+        r"|xn--[a-z0-9]{1,59})"  # or punycode label
+        r"(?<!-)"  # can't end with a dash
+        r"\.?"  # may have a trailing dot
+    )
+    host_re = "(" + hostname_re + domain_re + tld_re + "|localhost)"
+
+    regex = _lazy_re_compile(
+        r"^(?:[a-z0-9.+-]*)://"  # scheme is validated separately
+        r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?"  # user:pass authentication
+        r"(?:" + ipv4_re + "|" + ipv6_re + "|" + host_re + ")"
+        r"(?::[0-9]{1,5})?"  # port
+        r"(?:[/?#][^\s]*)?"  # resource path
+        r"\Z",
+        re.IGNORECASE,
+    )
+    message = _("Enter a valid URL.")
+    schemes = ["http", "https", "ftp", "ftps"]
+    unsafe_chars = frozenset("\t\r\n")
+    max_length = 2048
+
+    def __init__(self, schemes=None, **kwargs):
+        super().__init__(**kwargs)
+        if schemes is not None:
+            self.schemes = schemes
+
+    def __call__(self, value):
+        if not isinstance(value, str) or len(value) > self.max_length:
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+        if self.unsafe_chars.intersection(value):
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+        # Check if the scheme is valid.
+        scheme = value.split("://")[0].lower()
+        if scheme not in self.schemes:
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+
+        # Then check full URL
+        try:
+            splitted_url = urlsplit(value)
+        except ValueError:
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+        try:
+            super().__call__(value)
+        except ValidationError as e:
+            # Trivial case failed. Try for possible IDN domain
+            if value:
+                scheme, netloc, path, query, fragment = splitted_url
+                try:
+                    netloc = punycode(netloc)  # IDN -> ACE
+                except UnicodeError:  # invalid domain part
+                    raise e
+                url = urlunsplit((scheme, netloc, path, query, fragment))
+                super().__call__(url)
+            else:
+                raise
+        else:
+            # Now verify IPv6 in the netloc part
+            host_match = re.search(r"^\[(.+)\](?::[0-9]{1,5})?$", splitted_url.netloc)
+            if host_match:
+                potential_ip = host_match[1]
+                try:
+                    validate_ipv6_address(potential_ip)
+                except ValidationError:
+                    raise ValidationError(
+                        self.message, code=self.code, params={"value": value}
+                    )
+
+        # The maximum length of a full host name is 253 characters per RFC 1034
+        # section 3.1. It's defined to be 255 bytes or less, but this includes
+        # one byte for the length of the name and one byte for the trailing dot
+        # that's used to indicate absolute names in DNS.
+        if splitted_url.hostname is None or len(splitted_url.hostname) > 253:
+            raise ValidationError(self.message, code=self.code, params={"value": value})
+
+
+integer_validator = RegexValidator(
+    _lazy_re_compile(r"^-?\d+\Z"),
+    message=_("Enter a valid integer."),
+    code="invalid",
+)
+
+
+def validate_integer(value):
+    return integer_validator(value)
+
+
+@deconstructible
+class EmailValidator:
+    message = _("Enter a valid email address.")
+    code = "invalid"
+    user_regex = _lazy_re_compile(
+        # dot-atom
+        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
+        # quoted-string
+        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])'
+        r'*"\Z)',
+        re.IGNORECASE,
+    )
+    domain_regex = _lazy_re_compile(
+        # max length for domain name labels is 63 characters per RFC 1034
+        r"((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z",
+        re.IGNORECASE,
+    )
+    literal_regex = _lazy_re_compile(
+        # literal form, ipv4 or ipv6 address (SMTP 4.1.3)
+        r"\[([A-Fa-f0-9:.]+)\]\Z",
+        re.IGNORECASE,
+    )
+    domain_allowlist = ["localhost"]
+
+    def __init__(self, message=None, code=None, allowlist=None):
+        if message is not None:
+            self.message = message
+        if code is not None:
+            self.code = code
+        if allowlist is not None:
+            self.domain_allowlist = allowlist
+
+    def __call__(self, value):
+        # The maximum length of an email is 320 characters per RFC 3696
+        # section 3.
+        if not value or "@" not in value or len(value) >
320: + raise ValidationError(self.message, code=self.code, params={"value": value}) + + user_part, domain_part = value.rsplit("@", 1) + + if not self.user_regex.match(user_part): + raise ValidationError(self.message, code=self.code, params={"value": value}) + + if domain_part not in self.domain_allowlist and not self.validate_domain_part( + domain_part + ): + # Try for possible IDN domain-part + try: + domain_part = punycode(domain_part) + except UnicodeError: + pass + else: + if self.validate_domain_part(domain_part): + return + raise ValidationError(self.message, code=self.code, params={"value": value}) + + def validate_domain_part(self, domain_part): + if self.domain_regex.match(domain_part): + return True + + literal_match = self.literal_regex.match(domain_part) + if literal_match: + ip_address = literal_match[1] + try: + validate_ipv46_address(ip_address) + return True + except ValidationError: + pass + return False + + def __eq__(self, other): + return ( + isinstance(other, EmailValidator) + and (self.domain_allowlist == other.domain_allowlist) + and (self.message == other.message) + and (self.code == other.code) + ) + + +validate_email = EmailValidator() + +slug_re = _lazy_re_compile(r"^[-a-zA-Z0-9_]+\Z") +validate_slug = RegexValidator( + slug_re, + # Translators: "letters" means latin letters: a-z and A-Z. + _("Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."), + "invalid", +) + +slug_unicode_re = _lazy_re_compile(r"^[-\w]+\Z") +validate_unicode_slug = RegexValidator( + slug_unicode_re, + _( + "Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or " + "hyphens." + ), + "invalid", +) + + +def validate_ipv4_address(value): + try: + ipaddress.IPv4Address(value) + except ValueError: + raise ValidationError( + _("Enter a valid IPv4 address."), code="invalid", params={"value": value} + ) + + +def validate_ipv6_address(value): + if not is_valid_ipv6_address(value): + raise ValidationError( + _("Enter a valid IPv6 address."), code="invalid", params={"value": value} + ) + + +def validate_ipv46_address(value): + try: + validate_ipv4_address(value) + except ValidationError: + try: + validate_ipv6_address(value) + except ValidationError: + raise ValidationError( + _("Enter a valid IPv4 or IPv6 address."), + code="invalid", + params={"value": value}, + ) + + +ip_address_validator_map = { + "both": ([validate_ipv46_address], _("Enter a valid IPv4 or IPv6 address.")), + "ipv4": ([validate_ipv4_address], _("Enter a valid IPv4 address.")), + "ipv6": ([validate_ipv6_address], _("Enter a valid IPv6 address.")), +} + + +def ip_address_validators(protocol, unpack_ipv4): + """ + Depending on the given parameters, return the appropriate validators for + the GenericIPAddressField. + """ + if protocol != "both" and unpack_ipv4: + raise ValueError( + "You can only use `unpack_ipv4` if `protocol` is set to 'both'" + ) + try: + return ip_address_validator_map[protocol.lower()] + except KeyError: + raise ValueError( + "The protocol '%s' is unknown. Supported: %s" + % (protocol, list(ip_address_validator_map)) + ) + + +def int_list_validator(sep=",", message=None, code="invalid", allow_negative=False): + regexp = _lazy_re_compile( + r"^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z" + % { + "neg": "(-)?" 
if allow_negative else "", + "sep": re.escape(sep), + } + ) + return RegexValidator(regexp, message=message, code=code) + + +validate_comma_separated_integer_list = int_list_validator( + message=_("Enter only digits separated by commas."), +) + + +@deconstructible +class BaseValidator: + message = _("Ensure this value is %(limit_value)s (it is %(show_value)s).") + code = "limit_value" + + def __init__(self, limit_value, message=None): + self.limit_value = limit_value + if message: + self.message = message + + def __call__(self, value): + cleaned = self.clean(value) + limit_value = ( + self.limit_value() if callable(self.limit_value) else self.limit_value + ) + params = {"limit_value": limit_value, "show_value": cleaned, "value": value} + if self.compare(cleaned, limit_value): + raise ValidationError(self.message, code=self.code, params=params) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return ( + self.limit_value == other.limit_value + and self.message == other.message + and self.code == other.code + ) + + def compare(self, a, b): + return a is not b + + def clean(self, x): + return x + + +@deconstructible +class MaxValueValidator(BaseValidator): + message = _("Ensure this value is less than or equal to %(limit_value)s.") + code = "max_value" + + def compare(self, a, b): + return a > b + + +@deconstructible +class MinValueValidator(BaseValidator): + message = _("Ensure this value is greater than or equal to %(limit_value)s.") + code = "min_value" + + def compare(self, a, b): + return a < b + + +@deconstructible +class StepValueValidator(BaseValidator): + message = _("Ensure this value is a multiple of step size %(limit_value)s.") + code = "step_size" + + def __init__(self, limit_value, message=None, offset=None): + super().__init__(limit_value, message) + if offset is not None: + self.message = _( + "Ensure this value is a multiple of step size %(limit_value)s, " + "starting from %(offset)s, e.g. %(offset)s, %(valid_value1)s, " + "%(valid_value2)s, and so on." 
+ ) + self.offset = offset + + def __call__(self, value): + if self.offset is None: + super().__call__(value) + else: + cleaned = self.clean(value) + limit_value = ( + self.limit_value() if callable(self.limit_value) else self.limit_value + ) + if self.compare(cleaned, limit_value): + offset = cleaned.__class__(self.offset) + params = { + "limit_value": limit_value, + "offset": offset, + "valid_value1": offset + limit_value, + "valid_value2": offset + 2 * limit_value, + } + raise ValidationError(self.message, code=self.code, params=params) + + def compare(self, a, b): + offset = 0 if self.offset is None else self.offset + return not math.isclose(math.remainder(a - offset, b), 0, abs_tol=1e-9) + + +@deconstructible +class MinLengthValidator(BaseValidator): + message = ngettext_lazy( + "Ensure this value has at least %(limit_value)d character (it has " + "%(show_value)d).", + "Ensure this value has at least %(limit_value)d characters (it has " + "%(show_value)d).", + "limit_value", + ) + code = "min_length" + + def compare(self, a, b): + return a < b + + def clean(self, x): + return len(x) + + +@deconstructible +class MaxLengthValidator(BaseValidator): + message = ngettext_lazy( + "Ensure this value has at most %(limit_value)d character (it has " + "%(show_value)d).", + "Ensure this value has at most %(limit_value)d characters (it has " + "%(show_value)d).", + "limit_value", + ) + code = "max_length" + + def compare(self, a, b): + return a > b + + def clean(self, x): + return len(x) + + +@deconstructible +class DecimalValidator: + """ + Validate that the input does not exceed the maximum number of digits + expected, otherwise raise ValidationError. + """ + + messages = { + "invalid": _("Enter a number."), + "max_digits": ngettext_lazy( + "Ensure that there are no more than %(max)s digit in total.", + "Ensure that there are no more than %(max)s digits in total.", + "max", + ), + "max_decimal_places": ngettext_lazy( + "Ensure that there are no more than %(max)s decimal place.", + "Ensure that there are no more than %(max)s decimal places.", + "max", + ), + "max_whole_digits": ngettext_lazy( + "Ensure that there are no more than %(max)s digit before the decimal " + "point.", + "Ensure that there are no more than %(max)s digits before the decimal " + "point.", + "max", + ), + } + + def __init__(self, max_digits, decimal_places): + self.max_digits = max_digits + self.decimal_places = decimal_places + + def __call__(self, value): + digit_tuple, exponent = value.as_tuple()[1:] + if exponent in {"F", "n", "N"}: + raise ValidationError( + self.messages["invalid"], code="invalid", params={"value": value} + ) + if exponent >= 0: + digits = len(digit_tuple) + if digit_tuple != (0,): + # A positive exponent adds that many trailing zeros. + digits += exponent + decimals = 0 + else: + # If the absolute value of the negative exponent is larger than the + # number of digits, then it's the same as the number of digits, + # because it'll consume all of the digits in digit_tuple and then + # add abs(exponent) - len(digit_tuple) leading zeros after the + # decimal point. 
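+            # For example, Decimal("0.00001").as_tuple() gives digit_tuple
+            # (1,) and exponent -5, so digits = decimals = 5.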
+ if abs(exponent) > len(digit_tuple): + digits = decimals = abs(exponent) + else: + digits = len(digit_tuple) + decimals = abs(exponent) + whole_digits = digits - decimals + + if self.max_digits is not None and digits > self.max_digits: + raise ValidationError( + self.messages["max_digits"], + code="max_digits", + params={"max": self.max_digits, "value": value}, + ) + if self.decimal_places is not None and decimals > self.decimal_places: + raise ValidationError( + self.messages["max_decimal_places"], + code="max_decimal_places", + params={"max": self.decimal_places, "value": value}, + ) + if ( + self.max_digits is not None + and self.decimal_places is not None + and whole_digits > (self.max_digits - self.decimal_places) + ): + raise ValidationError( + self.messages["max_whole_digits"], + code="max_whole_digits", + params={"max": (self.max_digits - self.decimal_places), "value": value}, + ) + + def __eq__(self, other): + return ( + isinstance(other, self.__class__) + and self.max_digits == other.max_digits + and self.decimal_places == other.decimal_places + ) + + +@deconstructible +class FileExtensionValidator: + message = _( + "File extension “%(extension)s” is not allowed. " + "Allowed extensions are: %(allowed_extensions)s." + ) + code = "invalid_extension" + + def __init__(self, allowed_extensions=None, message=None, code=None): + if allowed_extensions is not None: + allowed_extensions = [ + allowed_extension.lower() for allowed_extension in allowed_extensions + ] + self.allowed_extensions = allowed_extensions + if message is not None: + self.message = message + if code is not None: + self.code = code + + def __call__(self, value): + extension = Path(value.name).suffix[1:].lower() + if ( + self.allowed_extensions is not None + and extension not in self.allowed_extensions + ): + raise ValidationError( + self.message, + code=self.code, + params={ + "extension": extension, + "allowed_extensions": ", ".join(self.allowed_extensions), + "value": value, + }, + ) + + def __eq__(self, other): + return ( + isinstance(other, self.__class__) + and self.allowed_extensions == other.allowed_extensions + and self.message == other.message + and self.code == other.code + ) + + +def get_available_image_extensions(): + try: + from PIL import Image + except ImportError: + return [] + else: + Image.init() + return [ext.lower()[1:] for ext in Image.EXTENSION] + + +def validate_image_file_extension(value): + return FileExtensionValidator(allowed_extensions=get_available_image_extensions())( + value + ) + + +@deconstructible +class ProhibitNullCharactersValidator: + """Validate that the string doesn't contain the null character.""" + + message = _("Null characters are not allowed.") + code = "null_characters_not_allowed" + + def __init__(self, message=None, code=None): + if message is not None: + self.message = message + if code is not None: + self.code = code + + def __call__(self, value): + if "\x00" in str(value): + raise ValidationError(self.message, code=self.code, params={"value": value}) + + def __eq__(self, other): + return ( + isinstance(other, self.__class__) + and self.message == other.message + and self.code == other.code + ) diff --git a/testbed/django__django/django/core/wsgi.py b/testbed/django__django/django/core/wsgi.py new file mode 100644 index 0000000000000000000000000000000000000000..35e0fa8e80fe280d02b90f9516e4f3839593fb39 --- /dev/null +++ b/testbed/django__django/django/core/wsgi.py @@ -0,0 +1,13 @@ +import django +from django.core.handlers.wsgi import WSGIHandler + + +def 
get_wsgi_application(): + """ + The public interface to Django's WSGI support. Return a WSGI callable. + + Avoids making django.core.handlers.WSGIHandler a public API, in case the + internal WSGI implementation changes or moves in the future. + """ + django.setup(set_prefix=False) + return WSGIHandler() diff --git a/testbed/django__django/django/db/backends/base/__init__.py b/testbed/django__django/django/db/backends/base/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/db/backends/base/base.py b/testbed/django__django/django/db/backends/base/base.py new file mode 100644 index 0000000000000000000000000000000000000000..84b9974b40b7de2884260444da10455c3c965a43 --- /dev/null +++ b/testbed/django__django/django/db/backends/base/base.py @@ -0,0 +1,788 @@ +import _thread +import copy +import datetime +import logging +import threading +import time +import warnings +import zoneinfo +from collections import deque +from contextlib import contextmanager + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.db import DEFAULT_DB_ALIAS, DatabaseError, NotSupportedError +from django.db.backends import utils +from django.db.backends.base.validation import BaseDatabaseValidation +from django.db.backends.signals import connection_created +from django.db.backends.utils import debug_transaction +from django.db.transaction import TransactionManagementError +from django.db.utils import DatabaseErrorWrapper +from django.utils.asyncio import async_unsafe +from django.utils.functional import cached_property + +NO_DB_ALIAS = "__no_db__" +RAN_DB_VERSION_CHECK = set() + +logger = logging.getLogger("django.db.backends.base") + + +class BaseDatabaseWrapper: + """Represent a database connection.""" + + # Mapping of Field objects to their column types. + data_types = {} + # Mapping of Field objects to their SQL suffix such as AUTOINCREMENT. + data_types_suffix = {} + # Mapping of Field objects to their SQL for CHECK constraints. + data_type_check_constraints = {} + ops = None + vendor = "unknown" + display_name = "unknown" + SchemaEditorClass = None + # Classes instantiated in __init__(). + client_class = None + creation_class = None + features_class = None + introspection_class = None + ops_class = None + validation_class = BaseDatabaseValidation + + queries_limit = 9000 + + def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS): + # Connection related attributes. + # The underlying database connection. + self.connection = None + # `settings_dict` should be a dictionary containing keys such as + # NAME, USER, etc. It's called `settings_dict` instead of `settings` + # to disambiguate it from Django settings modules. + self.settings_dict = settings_dict + self.alias = alias + # Query logging in debug mode or when explicitly enabled. + self.queries_log = deque(maxlen=self.queries_limit) + self.force_debug_cursor = False + + # Transaction related attributes. + # Tracks if the connection is in autocommit mode. Per PEP 249, by + # default, it isn't. + self.autocommit = False + # Tracks if the connection is in a transaction managed by 'atomic'. + self.in_atomic_block = False + # Increment to generate unique savepoint ids. + self.savepoint_state = 0 + # List of savepoints created by 'atomic'. + self.savepoint_ids = [] + # Stack of active 'atomic' blocks. + self.atomic_blocks = [] + # Tracks if the outermost 'atomic' block should commit on exit, + # ie. 
if autocommit was active on entry. + self.commit_on_exit = True + # Tracks if the transaction should be rolled back to the next + # available savepoint because of an exception in an inner block. + self.needs_rollback = False + self.rollback_exc = None + + # Connection termination related attributes. + self.close_at = None + self.closed_in_transaction = False + self.errors_occurred = False + self.health_check_enabled = False + self.health_check_done = False + + # Thread-safety related attributes. + self._thread_sharing_lock = threading.Lock() + self._thread_sharing_count = 0 + self._thread_ident = _thread.get_ident() + + # A list of no-argument functions to run when the transaction commits. + # Each entry is an (sids, func, robust) tuple, where sids is a set of + # the active savepoint IDs when this function was registered and robust + # specifies whether it's allowed for the function to fail. + self.run_on_commit = [] + + # Should we run the on-commit hooks the next time set_autocommit(True) + # is called? + self.run_commit_hooks_on_set_autocommit_on = False + + # A stack of wrappers to be invoked around execute()/executemany() + # calls. Each entry is a function taking five arguments: execute, sql, + # params, many, and context. It's the function's responsibility to + # call execute(sql, params, many, context). + self.execute_wrappers = [] + + self.client = self.client_class(self) + self.creation = self.creation_class(self) + self.features = self.features_class(self) + self.introspection = self.introspection_class(self) + self.ops = self.ops_class(self) + self.validation = self.validation_class(self) + + def __repr__(self): + return ( + f"<{self.__class__.__qualname__} " + f"vendor={self.vendor!r} alias={self.alias!r}>" + ) + + def ensure_timezone(self): + """ + Ensure the connection's timezone is set to `self.timezone_name` and + return whether it changed or not. + """ + return False + + @cached_property + def timezone(self): + """ + Return a tzinfo of the database connection time zone. + + This is only used when time zone support is enabled. When a datetime is + read from the database, it is always returned in this time zone. + + When the database backend supports time zones, it doesn't matter which + time zone Django uses, as long as aware datetimes are used everywhere. + Other users connecting to the database can choose their own time zone. + + When the database backend doesn't support time zones, the time zone + Django uses may be constrained by the requirements of other users of + the database. + """ + if not settings.USE_TZ: + return None + elif self.settings_dict["TIME_ZONE"] is None: + return datetime.timezone.utc + else: + return zoneinfo.ZoneInfo(self.settings_dict["TIME_ZONE"]) + + @cached_property + def timezone_name(self): + """ + Name of the time zone of the database connection. 
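+
+        When USE_TZ is False this falls back to settings.TIME_ZONE; when the
+        connection's TIME_ZONE option is unset, UTC is assumed.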
+ """ + if not settings.USE_TZ: + return settings.TIME_ZONE + elif self.settings_dict["TIME_ZONE"] is None: + return "UTC" + else: + return self.settings_dict["TIME_ZONE"] + + @property + def queries_logged(self): + return self.force_debug_cursor or settings.DEBUG + + @property + def queries(self): + if len(self.queries_log) == self.queries_log.maxlen: + warnings.warn( + "Limit for query logging exceeded, only the last {} queries " + "will be returned.".format(self.queries_log.maxlen) + ) + return list(self.queries_log) + + def get_database_version(self): + """Return a tuple of the database's version.""" + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require a get_database_version() " + "method." + ) + + def check_database_version_supported(self): + """ + Raise an error if the database version isn't supported by this + version of Django. + """ + if ( + self.features.minimum_database_version is not None + and self.get_database_version() < self.features.minimum_database_version + ): + db_version = ".".join(map(str, self.get_database_version())) + min_db_version = ".".join(map(str, self.features.minimum_database_version)) + raise NotSupportedError( + f"{self.display_name} {min_db_version} or later is required " + f"(found {db_version})." + ) + + # ##### Backend-specific methods for creating connections and cursors ##### + + def get_connection_params(self): + """Return a dict of parameters suitable for get_new_connection.""" + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require a get_connection_params() " + "method" + ) + + def get_new_connection(self, conn_params): + """Open a connection to the database.""" + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require a get_new_connection() " + "method" + ) + + def init_connection_state(self): + """Initialize the database connection settings.""" + global RAN_DB_VERSION_CHECK + if self.alias not in RAN_DB_VERSION_CHECK: + self.check_database_version_supported() + RAN_DB_VERSION_CHECK.add(self.alias) + + def create_cursor(self, name=None): + """Create a cursor. Assume that a connection is established.""" + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require a create_cursor() method" + ) + + # ##### Backend-specific methods for creating connections ##### + + @async_unsafe + def connect(self): + """Connect to the database. Assume that the connection is closed.""" + # Check for invalid configurations. + self.check_settings() + # In case the previous connection was closed while in an atomic block + self.in_atomic_block = False + self.savepoint_ids = [] + self.atomic_blocks = [] + self.needs_rollback = False + # Reset parameters defining when to close/health-check the connection. + self.health_check_enabled = self.settings_dict["CONN_HEALTH_CHECKS"] + max_age = self.settings_dict["CONN_MAX_AGE"] + self.close_at = None if max_age is None else time.monotonic() + max_age + self.closed_in_transaction = False + self.errors_occurred = False + # New connections are healthy. 
+ self.health_check_done = True + # Establish the connection + conn_params = self.get_connection_params() + self.connection = self.get_new_connection(conn_params) + self.set_autocommit(self.settings_dict["AUTOCOMMIT"]) + self.init_connection_state() + connection_created.send(sender=self.__class__, connection=self) + + self.run_on_commit = [] + + def check_settings(self): + if self.settings_dict["TIME_ZONE"] is not None and not settings.USE_TZ: + raise ImproperlyConfigured( + "Connection '%s' cannot set TIME_ZONE because USE_TZ is False." + % self.alias + ) + + @async_unsafe + def ensure_connection(self): + """Guarantee that a connection to the database is established.""" + if self.connection is None: + with self.wrap_database_errors: + self.connect() + + # ##### Backend-specific wrappers for PEP-249 connection methods ##### + + def _prepare_cursor(self, cursor): + """ + Validate the connection is usable and perform database cursor wrapping. + """ + self.validate_thread_sharing() + if self.queries_logged: + wrapped_cursor = self.make_debug_cursor(cursor) + else: + wrapped_cursor = self.make_cursor(cursor) + return wrapped_cursor + + def _cursor(self, name=None): + self.close_if_health_check_failed() + self.ensure_connection() + with self.wrap_database_errors: + return self._prepare_cursor(self.create_cursor(name)) + + def _commit(self): + if self.connection is not None: + with debug_transaction(self, "COMMIT"), self.wrap_database_errors: + return self.connection.commit() + + def _rollback(self): + if self.connection is not None: + with debug_transaction(self, "ROLLBACK"), self.wrap_database_errors: + return self.connection.rollback() + + def _close(self): + if self.connection is not None: + with self.wrap_database_errors: + return self.connection.close() + + # ##### Generic wrappers for PEP-249 connection methods ##### + + @async_unsafe + def cursor(self): + """Create a cursor, opening a connection if necessary.""" + return self._cursor() + + @async_unsafe + def commit(self): + """Commit a transaction and reset the dirty flag.""" + self.validate_thread_sharing() + self.validate_no_atomic_block() + self._commit() + # A successful commit means that the database connection works. + self.errors_occurred = False + self.run_commit_hooks_on_set_autocommit_on = True + + @async_unsafe + def rollback(self): + """Roll back a transaction and reset the dirty flag.""" + self.validate_thread_sharing() + self.validate_no_atomic_block() + self._rollback() + # A successful rollback means that the database connection works. + self.errors_occurred = False + self.needs_rollback = False + self.run_on_commit = [] + + @async_unsafe + def close(self): + """Close the connection to the database.""" + self.validate_thread_sharing() + self.run_on_commit = [] + + # Don't call validate_no_atomic_block() to avoid making it difficult + # to get rid of a connection in an invalid state. The next connect() + # will reset the transaction state anyway. 
+ if self.closed_in_transaction or self.connection is None: + return + try: + self._close() + finally: + if self.in_atomic_block: + self.closed_in_transaction = True + self.needs_rollback = True + else: + self.connection = None + + # ##### Backend-specific savepoint management methods ##### + + def _savepoint(self, sid): + with self.cursor() as cursor: + cursor.execute(self.ops.savepoint_create_sql(sid)) + + def _savepoint_rollback(self, sid): + with self.cursor() as cursor: + cursor.execute(self.ops.savepoint_rollback_sql(sid)) + + def _savepoint_commit(self, sid): + with self.cursor() as cursor: + cursor.execute(self.ops.savepoint_commit_sql(sid)) + + def _savepoint_allowed(self): + # Savepoints cannot be created outside a transaction + return self.features.uses_savepoints and not self.get_autocommit() + + # ##### Generic savepoint management methods ##### + + @async_unsafe + def savepoint(self): + """ + Create a savepoint inside the current transaction. Return an + identifier for the savepoint that will be used for the subsequent + rollback or commit. Do nothing if savepoints are not supported. + """ + if not self._savepoint_allowed(): + return + + thread_ident = _thread.get_ident() + tid = str(thread_ident).replace("-", "") + + self.savepoint_state += 1 + sid = "s%s_x%d" % (tid, self.savepoint_state) + + self.validate_thread_sharing() + self._savepoint(sid) + + return sid + + @async_unsafe + def savepoint_rollback(self, sid): + """ + Roll back to a savepoint. Do nothing if savepoints are not supported. + """ + if not self._savepoint_allowed(): + return + + self.validate_thread_sharing() + self._savepoint_rollback(sid) + + # Remove any callbacks registered while this savepoint was active. + self.run_on_commit = [ + (sids, func, robust) + for (sids, func, robust) in self.run_on_commit + if sid not in sids + ] + + @async_unsafe + def savepoint_commit(self, sid): + """ + Release a savepoint. Do nothing if savepoints are not supported. + """ + if not self._savepoint_allowed(): + return + + self.validate_thread_sharing() + self._savepoint_commit(sid) + + @async_unsafe + def clean_savepoints(self): + """ + Reset the counter used to generate unique savepoint ids in this thread. + """ + self.savepoint_state = 0 + + # ##### Backend-specific transaction management methods ##### + + def _set_autocommit(self, autocommit): + """ + Backend-specific implementation to enable or disable autocommit. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require a _set_autocommit() method" + ) + + # ##### Generic transaction management methods ##### + + def get_autocommit(self): + """Get the autocommit state.""" + self.ensure_connection() + return self.autocommit + + def set_autocommit( + self, autocommit, force_begin_transaction_with_broken_autocommit=False + ): + """ + Enable or disable autocommit. + + The usual way to start a transaction is to turn autocommit off. + SQLite does not properly start a transaction when disabling + autocommit. To avoid this buggy behavior and to actually enter a new + transaction, an explicit BEGIN is required. Using + force_begin_transaction_with_broken_autocommit=True will issue an + explicit BEGIN with SQLite. This option will be ignored for other + backends. 
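+
+        In practice this is usually driven by transaction.atomic() rather
+        than called directly.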
+ """ + self.validate_no_atomic_block() + self.close_if_health_check_failed() + self.ensure_connection() + + start_transaction_under_autocommit = ( + force_begin_transaction_with_broken_autocommit + and not autocommit + and hasattr(self, "_start_transaction_under_autocommit") + ) + + if start_transaction_under_autocommit: + self._start_transaction_under_autocommit() + elif autocommit: + self._set_autocommit(autocommit) + else: + with debug_transaction(self, "BEGIN"): + self._set_autocommit(autocommit) + self.autocommit = autocommit + + if autocommit and self.run_commit_hooks_on_set_autocommit_on: + self.run_and_clear_commit_hooks() + self.run_commit_hooks_on_set_autocommit_on = False + + def get_rollback(self): + """Get the "needs rollback" flag -- for *advanced use* only.""" + if not self.in_atomic_block: + raise TransactionManagementError( + "The rollback flag doesn't work outside of an 'atomic' block." + ) + return self.needs_rollback + + def set_rollback(self, rollback): + """ + Set or unset the "needs rollback" flag -- for *advanced use* only. + """ + if not self.in_atomic_block: + raise TransactionManagementError( + "The rollback flag doesn't work outside of an 'atomic' block." + ) + self.needs_rollback = rollback + + def validate_no_atomic_block(self): + """Raise an error if an atomic block is active.""" + if self.in_atomic_block: + raise TransactionManagementError( + "This is forbidden when an 'atomic' block is active." + ) + + def validate_no_broken_transaction(self): + if self.needs_rollback: + raise TransactionManagementError( + "An error occurred in the current transaction. You can't " + "execute queries until the end of the 'atomic' block." + ) from self.rollback_exc + + # ##### Foreign key constraints checks handling ##### + + @contextmanager + def constraint_checks_disabled(self): + """ + Disable foreign key constraint checking. + """ + disabled = self.disable_constraint_checking() + try: + yield + finally: + if disabled: + self.enable_constraint_checking() + + def disable_constraint_checking(self): + """ + Backends can implement as needed to temporarily disable foreign key + constraint checking. Should return True if the constraints were + disabled and will need to be reenabled. + """ + return False + + def enable_constraint_checking(self): + """ + Backends can implement as needed to re-enable foreign key constraint + checking. + """ + pass + + def check_constraints(self, table_names=None): + """ + Backends can override this method if they can apply constraint + checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE"). Should raise an + IntegrityError if any invalid foreign key references are encountered. + """ + pass + + # ##### Connection termination handling ##### + + def is_usable(self): + """ + Test if the database connection is usable. + + This method may assume that self.connection is not None. + + Actual implementations should take care not to raise exceptions + as that may prevent Django from recycling unusable connections. 
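+
+        Typical backend implementations issue a trivial round-trip (a ping or
+        a "SELECT 1") and return False on failure.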
+ """ + raise NotImplementedError( + "subclasses of BaseDatabaseWrapper may require an is_usable() method" + ) + + def close_if_health_check_failed(self): + """Close existing connection if it fails a health check.""" + if ( + self.connection is None + or not self.health_check_enabled + or self.health_check_done + ): + return + + if not self.is_usable(): + self.close() + self.health_check_done = True + + def close_if_unusable_or_obsolete(self): + """ + Close the current connection if unrecoverable errors have occurred + or if it outlived its maximum age. + """ + if self.connection is not None: + self.health_check_done = False + # If the application didn't restore the original autocommit setting, + # don't take chances, drop the connection. + if self.get_autocommit() != self.settings_dict["AUTOCOMMIT"]: + self.close() + return + + # If an exception other than DataError or IntegrityError occurred + # since the last commit / rollback, check if the connection works. + if self.errors_occurred: + if self.is_usable(): + self.errors_occurred = False + self.health_check_done = True + else: + self.close() + return + + if self.close_at is not None and time.monotonic() >= self.close_at: + self.close() + return + + # ##### Thread safety handling ##### + + @property + def allow_thread_sharing(self): + with self._thread_sharing_lock: + return self._thread_sharing_count > 0 + + def inc_thread_sharing(self): + with self._thread_sharing_lock: + self._thread_sharing_count += 1 + + def dec_thread_sharing(self): + with self._thread_sharing_lock: + if self._thread_sharing_count <= 0: + raise RuntimeError( + "Cannot decrement the thread sharing count below zero." + ) + self._thread_sharing_count -= 1 + + def validate_thread_sharing(self): + """ + Validate that the connection isn't accessed by another thread than the + one which originally created it, unless the connection was explicitly + authorized to be shared between threads (via the `inc_thread_sharing()` + method). Raise an exception if the validation fails. + """ + if not (self.allow_thread_sharing or self._thread_ident == _thread.get_ident()): + raise DatabaseError( + "DatabaseWrapper objects created in a " + "thread can only be used in that same thread. The object " + "with alias '%s' was created in thread id %s and this is " + "thread id %s." % (self.alias, self._thread_ident, _thread.get_ident()) + ) + + # ##### Miscellaneous ##### + + def prepare_database(self): + """ + Hook to do any database check or preparation, generally called before + migrating a project or an app. + """ + pass + + @cached_property + def wrap_database_errors(self): + """ + Context manager and decorator that re-throws backend-specific database + exceptions using Django's common wrappers. + """ + return DatabaseErrorWrapper(self) + + def chunked_cursor(self): + """ + Return a cursor that tries to avoid caching in the database (if + supported by the database), otherwise return a regular cursor. + """ + return self.cursor() + + def make_debug_cursor(self, cursor): + """Create a cursor that logs all queries in self.queries_log.""" + return utils.CursorDebugWrapper(cursor, self) + + def make_cursor(self, cursor): + """Create a cursor without debug logging.""" + return utils.CursorWrapper(cursor, self) + + @contextmanager + def temporary_connection(self): + """ + Context manager that ensures that a connection is established, and + if it opened one, closes it to avoid leaving a dangling connection. + This is useful for operations outside of the request-response cycle. 
+ + Provide a cursor: with self.temporary_connection() as cursor: ... + """ + must_close = self.connection is None + try: + with self.cursor() as cursor: + yield cursor + finally: + if must_close: + self.close() + + @contextmanager + def _nodb_cursor(self): + """ + Return a cursor from an alternative connection to be used when there is + no need to access the main database, specifically for test db + creation/deletion. This also prevents the production database from + being exposed to potential child threads while (or after) the test + database is destroyed. Refs #10868, #17786, #16969. + """ + conn = self.__class__({**self.settings_dict, "NAME": None}, alias=NO_DB_ALIAS) + try: + with conn.cursor() as cursor: + yield cursor + finally: + conn.close() + + def schema_editor(self, *args, **kwargs): + """ + Return a new instance of this backend's SchemaEditor. + """ + if self.SchemaEditorClass is None: + raise NotImplementedError( + "The SchemaEditorClass attribute of this database wrapper is still None" + ) + return self.SchemaEditorClass(self, *args, **kwargs) + + def on_commit(self, func, robust=False): + if not callable(func): + raise TypeError("on_commit()'s callback must be a callable.") + if self.in_atomic_block: + # Transaction in progress; save for execution on commit. + self.run_on_commit.append((set(self.savepoint_ids), func, robust)) + elif not self.get_autocommit(): + raise TransactionManagementError( + "on_commit() cannot be used in manual transaction management" + ) + else: + # No transaction in progress and in autocommit mode; execute + # immediately. + if robust: + try: + func() + except Exception as e: + logger.error( + f"Error calling {func.__qualname__} in on_commit() (%s).", + e, + exc_info=True, + ) + else: + func() + + def run_and_clear_commit_hooks(self): + self.validate_no_atomic_block() + current_run_on_commit = self.run_on_commit + self.run_on_commit = [] + while current_run_on_commit: + _, func, robust = current_run_on_commit.pop(0) + if robust: + try: + func() + except Exception as e: + logger.error( + f"Error calling {func.__qualname__} in on_commit() during " + f"transaction (%s).", + e, + exc_info=True, + ) + else: + func() + + @contextmanager + def execute_wrapper(self, wrapper): + """ + Return a context manager under which the wrapper is applied to suitable + database query executions. + """ + self.execute_wrappers.append(wrapper) + try: + yield + finally: + self.execute_wrappers.pop() + + def copy(self, alias=None): + """ + Return a copy of this connection. + + For tests that require two connections to the same database. + """ + settings_dict = copy.deepcopy(self.settings_dict) + if alias is None: + alias = self.alias + return type(self)(settings_dict, alias) diff --git a/testbed/django__django/django/db/backends/base/client.py b/testbed/django__django/django/db/backends/base/client.py new file mode 100644 index 0000000000000000000000000000000000000000..031056372d7967b8c495d170c49503c76b54754d --- /dev/null +++ b/testbed/django__django/django/db/backends/base/client.py @@ -0,0 +1,28 @@ +import os +import subprocess + + +class BaseDatabaseClient: + """Encapsulate backend-specific methods for opening a client shell.""" + + # This should be a string representing the name of the executable + # (e.g., "psql"). Subclasses must override this. + executable_name = None + + def __init__(self, connection): + # connection is an instance of BaseDatabaseWrapper. 
+ self.connection = connection + + @classmethod + def settings_to_cmd_args_env(cls, settings_dict, parameters): + raise NotImplementedError( + "subclasses of BaseDatabaseClient must provide a " + "settings_to_cmd_args_env() method or override a runshell()." + ) + + def runshell(self, parameters): + args, env = self.settings_to_cmd_args_env( + self.connection.settings_dict, parameters + ) + env = {**os.environ, **env} if env else None + subprocess.run(args, env=env, check=True) diff --git a/testbed/django__django/django/db/backends/base/creation.py b/testbed/django__django/django/db/backends/base/creation.py new file mode 100644 index 0000000000000000000000000000000000000000..cbac8a7f7672067fdafb0635fc191a897977f00b --- /dev/null +++ b/testbed/django__django/django/db/backends/base/creation.py @@ -0,0 +1,381 @@ +import os +import sys +from io import StringIO + +from django.apps import apps +from django.conf import settings +from django.core import serializers +from django.db import router +from django.db.transaction import atomic +from django.utils.module_loading import import_string + +# The prefix to put on the default database name when creating +# the test database. +TEST_DATABASE_PREFIX = "test_" + + +class BaseDatabaseCreation: + """ + Encapsulate backend-specific differences pertaining to creation and + destruction of the test database. + """ + + def __init__(self, connection): + self.connection = connection + + def _nodb_cursor(self): + return self.connection._nodb_cursor() + + def log(self, msg): + sys.stderr.write(msg + os.linesep) + + def create_test_db( + self, verbosity=1, autoclobber=False, serialize=True, keepdb=False + ): + """ + Create a test database, prompting the user for confirmation if the + database already exists. Return the name of the test database created. + """ + # Don't import django.core.management if it isn't needed. + from django.core.management import call_command + + test_database_name = self._get_test_db_name() + + if verbosity >= 1: + action = "Creating" + if keepdb: + action = "Using existing" + + self.log( + "%s test database for alias %s..." + % ( + action, + self._get_database_display_str(verbosity, test_database_name), + ) + ) + + # We could skip this call if keepdb is True, but we instead + # give it the keepdb param. This is to handle the case + # where the test DB doesn't exist, in which case we need to + # create it, then just not destroy it. If we instead skip + # this, we will get an exception. + self._create_test_db(verbosity, autoclobber, keepdb) + + self.connection.close() + settings.DATABASES[self.connection.alias]["NAME"] = test_database_name + self.connection.settings_dict["NAME"] = test_database_name + + try: + if self.connection.settings_dict["TEST"]["MIGRATE"] is False: + # Disable migrations for all apps. + old_migration_modules = settings.MIGRATION_MODULES + settings.MIGRATION_MODULES = { + app.label: None for app in apps.get_app_configs() + } + # We report migrate messages at one level lower than that + # requested. This ensures we don't get flooded with messages during + # testing (unless you really ask to be flooded). + call_command( + "migrate", + verbosity=max(verbosity - 1, 0), + interactive=False, + database=self.connection.alias, + run_syncdb=True, + ) + finally: + if self.connection.settings_dict["TEST"]["MIGRATE"] is False: + settings.MIGRATION_MODULES = old_migration_modules + + # We then serialize the current state of the database into a string + # and store it on the connection. 
This slightly horrific process is so people + # who are testing on databases without transactions or who are using + # a TransactionTestCase still get a clean database on every test run. + if serialize: + self.connection._test_serialized_contents = self.serialize_db_to_string() + + call_command("createcachetable", database=self.connection.alias) + + # Ensure a connection for the side effect of initializing the test database. + self.connection.ensure_connection() + + if os.environ.get("RUNNING_DJANGOS_TEST_SUITE") == "true": + self.mark_expected_failures_and_skips() + + return test_database_name + + def set_as_test_mirror(self, primary_settings_dict): + """ + Set this database up to be used in testing as a mirror of a primary + database whose settings are given. + """ + self.connection.settings_dict["NAME"] = primary_settings_dict["NAME"] + + def serialize_db_to_string(self): + """ + Serialize all data in the database into a JSON string. + Designed only for test runner usage; will not handle large + amounts of data. + """ + + # Iteratively return every object for all models to serialize. + def get_objects(): + from django.db.migrations.loader import MigrationLoader + + loader = MigrationLoader(self.connection) + for app_config in apps.get_app_configs(): + if ( + app_config.models_module is not None + and app_config.label in loader.migrated_apps + and app_config.name not in settings.TEST_NON_SERIALIZED_APPS + ): + for model in app_config.get_models(): + if model._meta.can_migrate( + self.connection + ) and router.allow_migrate_model(self.connection.alias, model): + queryset = model._base_manager.using( + self.connection.alias, + ).order_by(model._meta.pk.name) + yield from queryset.iterator() + + # Serialize to a string + out = StringIO() + serializers.serialize("json", get_objects(), indent=None, stream=out) + return out.getvalue() + + def deserialize_db_from_string(self, data): + """ + Reload the database with data from a string generated by + the serialize_db_to_string() method. + """ + data = StringIO(data) + table_names = set() + # Load data in a transaction to handle forward references and cycles. + with atomic(using=self.connection.alias): + # Disable constraint checks, because some databases (MySQL) doesn't + # support deferred checks. + with self.connection.constraint_checks_disabled(): + for obj in serializers.deserialize( + "json", data, using=self.connection.alias + ): + obj.save() + table_names.add(obj.object.__class__._meta.db_table) + # Manually check for any invalid keys that might have been added, + # because constraint checks were disabled. + self.connection.check_constraints(table_names=table_names) + + def _get_database_display_str(self, verbosity, database_name): + """ + Return display string for a database for use in various actions. + """ + return "'%s'%s" % ( + self.connection.alias, + (" ('%s')" % database_name) if verbosity >= 2 else "", + ) + + def _get_test_db_name(self): + """ + Internal implementation - return the name of the test DB that will be + created. Only useful when called from create_test_db() and + _create_test_db() and when no external munging is done with the 'NAME' + settings. 
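+
+        If TEST["NAME"] is set in the database settings it is used as-is;
+        otherwise TEST_DATABASE_PREFIX ("test_") is prepended to NAME.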
+ """ + if self.connection.settings_dict["TEST"]["NAME"]: + return self.connection.settings_dict["TEST"]["NAME"] + return TEST_DATABASE_PREFIX + self.connection.settings_dict["NAME"] + + def _execute_create_test_db(self, cursor, parameters, keepdb=False): + cursor.execute("CREATE DATABASE %(dbname)s %(suffix)s" % parameters) + + def _create_test_db(self, verbosity, autoclobber, keepdb=False): + """ + Internal implementation - create the test db tables. + """ + test_database_name = self._get_test_db_name() + test_db_params = { + "dbname": self.connection.ops.quote_name(test_database_name), + "suffix": self.sql_table_creation_suffix(), + } + # Create the test database and connect to it. + with self._nodb_cursor() as cursor: + try: + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception as e: + # if we want to keep the db, then no need to do any of the below, + # just return and skip it all. + if keepdb: + return test_database_name + + self.log("Got an error creating the test database: %s" % e) + if not autoclobber: + confirm = input( + "Type 'yes' if you would like to try deleting the test " + "database '%s', or 'no' to cancel: " % test_database_name + ) + if autoclobber or confirm == "yes": + try: + if verbosity >= 1: + self.log( + "Destroying old test database for alias %s..." + % ( + self._get_database_display_str( + verbosity, test_database_name + ), + ) + ) + cursor.execute("DROP DATABASE %(dbname)s" % test_db_params) + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception as e: + self.log("Got an error recreating the test database: %s" % e) + sys.exit(2) + else: + self.log("Tests cancelled.") + sys.exit(1) + + return test_database_name + + def clone_test_db(self, suffix, verbosity=1, autoclobber=False, keepdb=False): + """ + Clone a test database. + """ + source_database_name = self.connection.settings_dict["NAME"] + + if verbosity >= 1: + action = "Cloning test database" + if keepdb: + action = "Using existing clone" + self.log( + "%s for alias %s..." + % ( + action, + self._get_database_display_str(verbosity, source_database_name), + ) + ) + + # We could skip this call if keepdb is True, but we instead + # give it the keepdb param. See create_test_db for details. + self._clone_test_db(suffix, verbosity, keepdb) + + def get_test_db_clone_settings(self, suffix): + """ + Return a modified connection settings dict for the n-th clone of a DB. + """ + # When this function is called, the test database has been created + # already and its name has been copied to settings_dict['NAME'] so + # we don't need to call _get_test_db_name. + orig_settings_dict = self.connection.settings_dict + return { + **orig_settings_dict, + "NAME": "{}_{}".format(orig_settings_dict["NAME"], suffix), + } + + def _clone_test_db(self, suffix, verbosity, keepdb=False): + """ + Internal implementation - duplicate the test db tables. + """ + raise NotImplementedError( + "The database backend doesn't support cloning databases. " + "Disable the option to run tests in parallel processes." + ) + + def destroy_test_db( + self, old_database_name=None, verbosity=1, keepdb=False, suffix=None + ): + """ + Destroy a test database, prompting the user for confirmation if the + database already exists. 
+ """ + self.connection.close() + if suffix is None: + test_database_name = self.connection.settings_dict["NAME"] + else: + test_database_name = self.get_test_db_clone_settings(suffix)["NAME"] + + if verbosity >= 1: + action = "Destroying" + if keepdb: + action = "Preserving" + self.log( + "%s test database for alias %s..." + % ( + action, + self._get_database_display_str(verbosity, test_database_name), + ) + ) + + # if we want to preserve the database + # skip the actual destroying piece. + if not keepdb: + self._destroy_test_db(test_database_name, verbosity) + + # Restore the original database name + if old_database_name is not None: + settings.DATABASES[self.connection.alias]["NAME"] = old_database_name + self.connection.settings_dict["NAME"] = old_database_name + + def _destroy_test_db(self, test_database_name, verbosity): + """ + Internal implementation - remove the test db tables. + """ + # Remove the test database to clean up after + # ourselves. Connect to the previous database (not the test database) + # to do so, because it's not allowed to delete a database while being + # connected to it. + with self._nodb_cursor() as cursor: + cursor.execute( + "DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name) + ) + + def mark_expected_failures_and_skips(self): + """ + Mark tests in Django's test suite which are expected failures on this + database and test which should be skipped on this database. + """ + # Only load unittest if we're actually testing. + from unittest import expectedFailure, skip + + for test_name in self.connection.features.django_test_expected_failures: + test_case_name, _, test_method_name = test_name.rpartition(".") + test_app = test_name.split(".")[0] + # Importing a test app that isn't installed raises RuntimeError. + if test_app in settings.INSTALLED_APPS: + test_case = import_string(test_case_name) + test_method = getattr(test_case, test_method_name) + setattr(test_case, test_method_name, expectedFailure(test_method)) + for reason, tests in self.connection.features.django_test_skips.items(): + for test_name in tests: + test_case_name, _, test_method_name = test_name.rpartition(".") + test_app = test_name.split(".")[0] + # Importing a test app that isn't installed raises RuntimeError. + if test_app in settings.INSTALLED_APPS: + test_case = import_string(test_case_name) + test_method = getattr(test_case, test_method_name) + setattr(test_case, test_method_name, skip(reason)(test_method)) + + def sql_table_creation_suffix(self): + """ + SQL to append to the end of the test table creation statements. + """ + return "" + + def test_db_signature(self): + """ + Return a tuple with elements of self.connection.settings_dict (a + DATABASES setting value) that uniquely identify a database + accordingly to the RDBMS particularities. + """ + settings_dict = self.connection.settings_dict + return ( + settings_dict["HOST"], + settings_dict["PORT"], + settings_dict["ENGINE"], + self._get_test_db_name(), + ) + + def setup_worker_connection(self, _worker_id): + settings_dict = self.get_test_db_clone_settings(str(_worker_id)) + # connection.settings_dict must be updated in place for changes to be + # reflected in django.db.connections. If the following line assigned + # connection.settings_dict = settings_dict, new threads would connect + # to the default database instead of the appropriate clone. 
+ self.connection.settings_dict.update(settings_dict) + self.connection.close() diff --git a/testbed/django__django/django/db/backends/base/features.py b/testbed/django__django/django/db/backends/base/features.py new file mode 100644 index 0000000000000000000000000000000000000000..11dd0791109c2926ab75f0fc477a32fbd7407952 --- /dev/null +++ b/testbed/django__django/django/db/backends/base/features.py @@ -0,0 +1,408 @@ +from django.db import ProgrammingError +from django.utils.functional import cached_property + + +class BaseDatabaseFeatures: + # An optional tuple indicating the minimum supported database version. + minimum_database_version = None + gis_enabled = False + # Oracle can't group by LOB (large object) data types. + allows_group_by_lob = True + allows_group_by_selected_pks = False + allows_group_by_select_index = True + empty_fetchmany_value = [] + update_can_self_select = True + # Does the backend support self-reference subqueries in the DELETE + # statement? + delete_can_self_reference_subquery = True + + # Does the backend distinguish between '' and None? + interprets_empty_strings_as_nulls = False + + # Does the backend allow inserting duplicate NULL rows in a nullable + # unique field? All core backends implement this correctly, but other + # databases such as SQL Server do not. + supports_nullable_unique_constraints = True + + # Does the backend allow inserting duplicate rows when a unique_together + # constraint exists and some fields are nullable but not all of them? + supports_partially_nullable_unique_constraints = True + # Does the backend support initially deferrable unique constraints? + supports_deferrable_unique_constraints = False + + can_use_chunked_reads = True + can_return_columns_from_insert = False + can_return_rows_from_bulk_insert = False + has_bulk_insert = True + uses_savepoints = True + can_release_savepoints = False + + # If True, don't use integer foreign keys referring to, e.g., positive + # integer primary keys. + related_fields_match_type = False + allow_sliced_subqueries_with_in = True + has_select_for_update = False + has_select_for_update_nowait = False + has_select_for_update_skip_locked = False + has_select_for_update_of = False + has_select_for_no_key_update = False + # Does the database's SELECT FOR UPDATE OF syntax require a column rather + # than a table? + select_for_update_of_column = False + + # Does the default test database allow multiple connections? + # Usually an indication that the test database is in-memory + test_db_allows_multiple_connections = True + + # Can an object be saved without an explicit primary key? + supports_unspecified_pk = False + + # Can a fixture contain forward references? i.e., are + # FK constraints checked at the end of transaction, or + # at the end of each save operation? + supports_forward_references = True + + # Does the backend truncate names properly when they are too long? + truncates_names = False + + # Is there a REAL datatype in addition to floats/doubles? + has_real_datatype = False + supports_subqueries_in_group_by = True + + # Does the backend ignore unnecessary ORDER BY clauses in subqueries? + ignores_unnecessary_order_by_in_subqueries = True + + # Is there a true datatype for uuid? + has_native_uuid_field = False + + # Is there a true datatype for timedeltas? + has_native_duration_field = False + + # Does the database driver supports same type temporal data subtraction + # by returning the type used to store duration field? 
+ supports_temporal_subtraction = False + + # Does the __regex lookup support backreferencing and grouping? + supports_regex_backreferencing = True + + # Can date/datetime lookups be performed using a string? + supports_date_lookup_using_string = True + + # Can datetimes with timezones be used? + supports_timezones = True + + # Does the database have a copy of the zoneinfo database? + has_zoneinfo_database = True + + # When performing a GROUP BY, is an ORDER BY NULL required + # to remove any ordering? + requires_explicit_null_ordering_when_grouping = False + + # Does the backend order NULL values as largest or smallest? + nulls_order_largest = False + + # Does the backend support NULLS FIRST and NULLS LAST in ORDER BY? + supports_order_by_nulls_modifier = True + + # Does the backend orders NULLS FIRST by default? + order_by_nulls_first = False + + # The database's limit on the number of query parameters. + max_query_params = None + + # Can an object have an autoincrement primary key of 0? + allows_auto_pk_0 = True + + # Do we need to NULL a ForeignKey out, or can the constraint check be + # deferred + can_defer_constraint_checks = False + + # Does the backend support tablespaces? Default to False because it isn't + # in the SQL standard. + supports_tablespaces = False + + # Does the backend reset sequences between tests? + supports_sequence_reset = True + + # Can the backend introspect the default value of a column? + can_introspect_default = True + + # Confirm support for introspected foreign keys + # Every database can do this reliably, except MySQL, + # which can't do it for MyISAM tables + can_introspect_foreign_keys = True + + # Map fields which some backends may not be able to differentiate to the + # field it's introspected as. + introspected_field_types = { + "AutoField": "AutoField", + "BigAutoField": "BigAutoField", + "BigIntegerField": "BigIntegerField", + "BinaryField": "BinaryField", + "BooleanField": "BooleanField", + "CharField": "CharField", + "DurationField": "DurationField", + "GenericIPAddressField": "GenericIPAddressField", + "IntegerField": "IntegerField", + "PositiveBigIntegerField": "PositiveBigIntegerField", + "PositiveIntegerField": "PositiveIntegerField", + "PositiveSmallIntegerField": "PositiveSmallIntegerField", + "SmallAutoField": "SmallAutoField", + "SmallIntegerField": "SmallIntegerField", + "TimeField": "TimeField", + } + + # Can the backend introspect the column order (ASC/DESC) for indexes? + supports_index_column_ordering = True + + # Does the backend support introspection of materialized views? + can_introspect_materialized_views = False + + # Support for the DISTINCT ON clause + can_distinct_on_fields = False + + # Does the backend prevent running SQL queries in broken transactions? + atomic_transactions = True + + # Can we roll back DDL in a transaction? + can_rollback_ddl = False + + schema_editor_uses_clientside_param_binding = False + + # Does it support operations requiring references rename in a transaction? + supports_atomic_references_rename = True + + # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE? + supports_combined_alters = False + + # Does it support foreign keys? + supports_foreign_keys = True + + # Can it create foreign key constraints inline when adding columns? + can_create_inline_fk = True + + # Can an index be renamed? + can_rename_index = False + + # Does it automatically index foreign keys? + indexes_foreign_keys = True + + # Does it support CHECK constraints? 
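These flags are consulted at runtime through connection.features; a hedged sketch of the typical pattern, with the branch bodies elided:

```python
from django.db import connection

# e.g., schema code checks a flag before emitting CHECK constraint DDL:
if connection.features.supports_table_check_constraints:
    ...  # ALTER TABLE ... ADD CONSTRAINT ... CHECK (...)
else:
    ...  # skip the constraint on this backend
```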
+ supports_column_check_constraints = True + supports_table_check_constraints = True + # Does the backend support introspection of CHECK constraints? + can_introspect_check_constraints = True + + # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value}) + # parameter passing? Note this can be provided by the backend even if not + # supported by the Python driver + supports_paramstyle_pyformat = True + + # Does the backend require literal defaults, rather than parameterized ones? + requires_literal_defaults = False + + # Does the backend support functions in defaults? + supports_expression_defaults = True + + # Does the backend support the DEFAULT keyword in insert queries? + supports_default_keyword_in_insert = True + + # Does the backend support the DEFAULT keyword in bulk insert queries? + supports_default_keyword_in_bulk_insert = True + + # Does the backend require a connection reset after each material schema change? + connection_persists_old_columns = False + + # What kind of error does the backend throw when accessing closed cursor? + closed_cursor_error_class = ProgrammingError + + # Does 'a' LIKE 'A' match? + has_case_insensitive_like = False + + # Suffix for backends that don't support "SELECT xxx;" queries. + bare_select_suffix = "" + + # If NULL is implied on columns without needing to be explicitly specified + implied_column_null = False + + # Does the backend support "select for update" queries with limit (and offset)? + supports_select_for_update_with_limit = True + + # Does the backend ignore null expressions in GREATEST and LEAST queries unless + # every expression is null? + greatest_least_ignores_nulls = False + + # Can the backend clone databases for parallel test execution? + # Defaults to False to allow third-party backends to opt-in. + can_clone_databases = False + + # Does the backend consider table names with different casing to + # be equal? + ignores_table_name_case = False + + # Place FOR UPDATE right after FROM clause. Used on MSSQL. + for_update_after_from = False + + # Combinatorial flags + supports_select_union = True + supports_select_intersection = True + supports_select_difference = True + supports_slicing_ordering_in_compound = False + supports_parentheses_in_compound = True + requires_compound_order_by_subquery = False + + # Does the database support SQL 2003 FILTER (WHERE ...) in aggregate + # expressions? + supports_aggregate_filter_clause = False + + # Does the backend support indexing a TextField? + supports_index_on_text_field = True + + # Does the backend support window expressions (expression OVER (...))? + supports_over_clause = False + supports_frame_range_fixed_distance = False + only_supports_unbounded_with_preceding_and_following = False + + # Does the backend support CAST with precision? + supports_cast_with_precision = True + + # How many second decimals does the database return when casting a value to + # a type with time? + time_cast_precision = 6 + + # SQL to create a procedure for use by the Django test suite. The + # functionality of the procedure isn't important. + create_test_procedure_without_params_sql = None + create_test_procedure_with_int_param_sql = None + + # SQL to create a table with a composite primary key for use by the Django + # test suite. + create_test_table_with_composite_primary_key = None + + # Does the backend support keyword parameters for cursor.callproc()? + supports_callproc_kwargs = False + + # What formats does the backend EXPLAIN syntax support? 
+ supported_explain_formats = set() + + # Does the backend support the default parameter in lead() and lag()? + supports_default_in_lead_lag = True + + # Does the backend support ignoring constraint or uniqueness errors during + # INSERT? + supports_ignore_conflicts = True + # Does the backend support updating rows on constraint or uniqueness errors + # during INSERT? + supports_update_conflicts = False + supports_update_conflicts_with_target = False + + # Does this backend require casting the results of CASE expressions used + # in UPDATE statements to ensure the expression has the correct type? + requires_casted_case_in_updates = False + + # Does the backend support partial indexes (CREATE INDEX ... WHERE ...)? + supports_partial_indexes = True + supports_functions_in_partial_indexes = True + # Does the backend support covering indexes (CREATE INDEX ... INCLUDE ...)? + supports_covering_indexes = False + # Does the backend support indexes on expressions? + supports_expression_indexes = True + # Does the backend treat COLLATE as an indexed expression? + collate_as_index_expression = False + + # Does the database allow more than one constraint or index on the same + # field(s)? + allows_multiple_constraints_on_same_fields = True + + # Does the backend support boolean expressions in SELECT and GROUP BY + # clauses? + supports_boolean_expr_in_select_clause = True + # Does the backend support comparing boolean expressions in WHERE clauses? + # Eg: WHERE (price > 0) IS NOT NULL + supports_comparing_boolean_expr = True + + # Does the backend support JSONField? + supports_json_field = True + # Can the backend introspect a JSONField? + can_introspect_json_field = True + # Does the backend support primitives in JSONField? + supports_primitives_in_json_field = True + # Is there a true datatype for JSON? + has_native_json_field = False + # Does the backend use PostgreSQL-style JSON operators like '->'? + has_json_operators = False + # Does the backend support __contains and __contained_by lookups for + # a JSONField? + supports_json_field_contains = True + # Does value__d__contains={'f': 'g'} (without a list around the dict) match + # {'d': [{'f': 'g'}]}? + json_key_contains_list_matching_requires_list = False + # Does the backend support JSONObject() database function? + has_json_object_function = True + + # Does the backend support column collations? + supports_collation_on_charfield = True + supports_collation_on_textfield = True + # Does the backend support non-deterministic collations? + supports_non_deterministic_collations = True + + # Does the backend support column and table comments? + supports_comments = False + # Does the backend support column comments in ADD COLUMN statements? + supports_comments_inline = False + + # Does the backend support the logical XOR operator? + supports_logical_xor = False + + # Set to (exception, message) if null characters in text are disallowed. + prohibits_null_characters_in_text_exception = None + + # Does the backend support unlimited character columns? + supports_unlimited_charfield = False + + # Collation names for use by the Django test suite. + test_collations = { + "ci": None, # Case-insensitive. + "cs": None, # Case-sensitive. + "non_default": None, # Non-default. + "swedish_ci": None, # Swedish case-insensitive. + } + # SQL template override for tests.aggregation.tests.NowUTC + test_now_utc_template = None + + # SQL to create a model instance using the database defaults. 
+ insert_test_table_with_defaults = None + + # A set of dotted paths to tests in Django's test suite that are expected + # to fail on this database. + django_test_expected_failures = set() + # A map of reasons to sets of dotted paths to tests in Django's test suite + # that should be skipped for this database. + django_test_skips = {} + + def __init__(self, connection): + self.connection = connection + + @cached_property + def supports_explaining_query_execution(self): + """Does this backend support explaining query execution?""" + return self.connection.ops.explain_prefix is not None + + @cached_property + def supports_transactions(self): + """Confirm support for transactions.""" + with self.connection.cursor() as cursor: + cursor.execute("CREATE TABLE ROLLBACK_TEST (X INT)") + self.connection.set_autocommit(False) + cursor.execute("INSERT INTO ROLLBACK_TEST (X) VALUES (8)") + self.connection.rollback() + self.connection.set_autocommit(True) + cursor.execute("SELECT COUNT(X) FROM ROLLBACK_TEST") + (count,) = cursor.fetchone() + cursor.execute("DROP TABLE ROLLBACK_TEST") + return count == 0 + + def allows_group_by_selected_pks_on_model(self, model): + if not self.allows_group_by_selected_pks: + return False + return model._meta.managed diff --git a/testbed/django__django/django/db/backends/base/introspection.py b/testbed/django__django/django/db/backends/base/introspection.py new file mode 100644 index 0000000000000000000000000000000000000000..5e4acb3ff9098e55f051d2255f380b2b35e0766d --- /dev/null +++ b/testbed/django__django/django/db/backends/base/introspection.py @@ -0,0 +1,212 @@ +from collections import namedtuple + +# Structure returned by DatabaseIntrospection.get_table_list() +TableInfo = namedtuple("TableInfo", ["name", "type"]) + +# Structure returned by the DB-API cursor.description interface (PEP 249) +FieldInfo = namedtuple( + "FieldInfo", + "name type_code display_size internal_size precision scale null_ok " + "default collation", +) + + +class BaseDatabaseIntrospection: + """Encapsulate backend-specific introspection utilities.""" + + data_types_reverse = {} + + def __init__(self, connection): + self.connection = connection + + def get_field_type(self, data_type, description): + """ + Hook for a database backend to use the cursor description to + match a Django field type to a database column. + + For Oracle, the column data_type on its own is insufficient to + distinguish between a FloatField and IntegerField, for example. + """ + return self.data_types_reverse[data_type] + + def identifier_converter(self, name): + """ + Apply a conversion to the identifier for the purposes of comparison. + + The default identifier converter is for case sensitive comparison. + """ + return name + + def table_names(self, cursor=None, include_views=False): + """ + Return a list of names of all tables that exist in the database. + Sort the returned table list by Python's default sorting. Do NOT use + the database's ORDER BY here to avoid subtle differences in sorting + order between databases. + """ + + def get_names(cursor): + return sorted( + ti.name + for ti in self.get_table_list(cursor) + if include_views or ti.type == "t" + ) + + if cursor is None: + with self.connection.cursor() as cursor: + return get_names(cursor) + return get_names(cursor) + + def get_table_list(self, cursor): + """ + Return an unsorted list of TableInfo named tuples of all tables and + views that exist in the database. 
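A hedged sketch of a backend override, showing only the TableInfo shape that callers such as table_names() rely on (the catalog query is hypothetical):

```python
def get_table_list(self, cursor):
    # Hypothetical catalog table; the contract is the returned namedtuples.
    cursor.execute("SELECT name, kind FROM my_backend_catalog")
    return [
        TableInfo(name, "t" if kind == "table" else "v")
        for name, kind in cursor.fetchall()
    ]
```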
+ """ + raise NotImplementedError( + "subclasses of BaseDatabaseIntrospection may require a get_table_list() " + "method" + ) + + def get_table_description(self, cursor, table_name): + """ + Return a description of the table with the DB-API cursor.description + interface. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseIntrospection may require a " + "get_table_description() method." + ) + + def get_migratable_models(self): + from django.apps import apps + from django.db import router + + return ( + model + for app_config in apps.get_app_configs() + for model in router.get_migratable_models(app_config, self.connection.alias) + if model._meta.can_migrate(self.connection) + ) + + def django_table_names(self, only_existing=False, include_views=True): + """ + Return a list of all table names that have associated Django models and + are in INSTALLED_APPS. + + If only_existing is True, include only the tables in the database. + """ + tables = set() + for model in self.get_migratable_models(): + if not model._meta.managed: + continue + tables.add(model._meta.db_table) + tables.update( + f.m2m_db_table() + for f in model._meta.local_many_to_many + if f.remote_field.through._meta.managed + ) + tables = list(tables) + if only_existing: + existing_tables = set(self.table_names(include_views=include_views)) + tables = [ + t for t in tables if self.identifier_converter(t) in existing_tables + ] + return tables + + def installed_models(self, tables): + """ + Return a set of all models represented by the provided list of table + names. + """ + tables = set(map(self.identifier_converter, tables)) + return { + m + for m in self.get_migratable_models() + if self.identifier_converter(m._meta.db_table) in tables + } + + def sequence_list(self): + """ + Return a list of information about all DB sequences for all models in + all apps. + """ + sequence_list = [] + with self.connection.cursor() as cursor: + for model in self.get_migratable_models(): + if not model._meta.managed: + continue + if model._meta.swapped: + continue + sequence_list.extend( + self.get_sequences( + cursor, model._meta.db_table, model._meta.local_fields + ) + ) + for f in model._meta.local_many_to_many: + # If this is an m2m using an intermediate table, + # we don't need to reset the sequence. + if f.remote_field.through._meta.auto_created: + sequence = self.get_sequences(cursor, f.m2m_db_table()) + sequence_list.extend( + sequence or [{"table": f.m2m_db_table(), "column": None}] + ) + return sequence_list + + def get_sequences(self, cursor, table_name, table_fields=()): + """ + Return a list of introspected sequences for table_name. Each sequence + is a dict: {'table': , 'column': }. An optional + 'name' key can be added if the backend supports named sequences. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseIntrospection may require a get_sequences() " + "method" + ) + + def get_relations(self, cursor, table_name): + """ + Return a dictionary of {field_name: (field_name_other_table, other_table)} + representing all foreign keys in the given table. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseIntrospection may require a " + "get_relations() method." + ) + + def get_primary_key_column(self, cursor, table_name): + """ + Return the name of the primary key column for the given table. 
+ """ + columns = self.get_primary_key_columns(cursor, table_name) + return columns[0] if columns else None + + def get_primary_key_columns(self, cursor, table_name): + """Return a list of primary key columns for the given table.""" + for constraint in self.get_constraints(cursor, table_name).values(): + if constraint["primary_key"]: + return constraint["columns"] + return None + + def get_constraints(self, cursor, table_name): + """ + Retrieve any constraints or keys (unique, pk, fk, check, index) + across one or more columns. + + Return a dict mapping constraint names to their attributes, + where attributes is a dict with keys: + * columns: List of columns this covers + * primary_key: True if primary key, False otherwise + * unique: True if this is a unique constraint, False otherwise + * foreign_key: (table, column) of target, or None + * check: True if check constraint, False otherwise + * index: True if index, False otherwise. + * orders: The order (ASC/DESC) defined for the columns of indexes + * type: The type of the index (btree, hash, etc.) + + Some backends may return special constraint names that don't exist + if they don't name constraints of a certain type (e.g. SQLite) + """ + raise NotImplementedError( + "subclasses of BaseDatabaseIntrospection may require a get_constraints() " + "method" + ) diff --git a/testbed/django__django/django/db/backends/base/operations.py b/testbed/django__django/django/db/backends/base/operations.py new file mode 100644 index 0000000000000000000000000000000000000000..6f10e31cd5bc41536ac69475f7d9eb8a3f41d562 --- /dev/null +++ b/testbed/django__django/django/db/backends/base/operations.py @@ -0,0 +1,785 @@ +import datetime +import decimal +import json +from importlib import import_module + +import sqlparse + +from django.conf import settings +from django.db import NotSupportedError, transaction +from django.db.backends import utils +from django.db.models.expressions import Col +from django.utils import timezone +from django.utils.encoding import force_str + + +class BaseDatabaseOperations: + """ + Encapsulate backend-specific differences, such as the way a backend + performs ordering or calculates the ID of a recently-inserted row. + """ + + compiler_module = "django.db.models.sql.compiler" + + # Integer field safe ranges by `internal_type` as documented + # in docs/ref/models/fields.txt. + integer_field_ranges = { + "SmallIntegerField": (-32768, 32767), + "IntegerField": (-2147483648, 2147483647), + "BigIntegerField": (-9223372036854775808, 9223372036854775807), + "PositiveBigIntegerField": (0, 9223372036854775807), + "PositiveSmallIntegerField": (0, 32767), + "PositiveIntegerField": (0, 2147483647), + "SmallAutoField": (-32768, 32767), + "AutoField": (-2147483648, 2147483647), + "BigAutoField": (-9223372036854775808, 9223372036854775807), + } + set_operators = { + "union": "UNION", + "intersection": "INTERSECT", + "difference": "EXCEPT", + } + # Mapping of Field.get_internal_type() (typically the model field's class + # name) to the data type to use for the Cast() function, if different from + # DatabaseWrapper.data_types. + cast_data_types = {} + # CharField data type if the max_length argument isn't provided. + cast_char_field_without_max_length = None + + # Start and end points for window expressions. + PRECEDING = "PRECEDING" + FOLLOWING = "FOLLOWING" + UNBOUNDED_PRECEDING = "UNBOUNDED " + PRECEDING + UNBOUNDED_FOLLOWING = "UNBOUNDED " + FOLLOWING + CURRENT_ROW = "CURRENT ROW" + + # Prefix for EXPLAIN queries, or None EXPLAIN isn't supported. 
+ explain_prefix = None + + def __init__(self, connection): + self.connection = connection + self._cache = None + + def autoinc_sql(self, table, column): + """ + Return any SQL needed to support auto-incrementing primary keys, or + None if no SQL is necessary. + + This SQL is executed when a table is created. + """ + return None + + def bulk_batch_size(self, fields, objs): + """ + Return the maximum allowed batch size for the backend. The fields + are the fields going to be inserted in the batch, the objs contains + all the objects to be inserted. + """ + return len(objs) + + def format_for_duration_arithmetic(self, sql): + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a " + "format_for_duration_arithmetic() method." + ) + + def cache_key_culling_sql(self): + """ + Return an SQL query that retrieves the first cache key greater than the + n smallest. + + This is used by the 'db' cache backend to determine where to start + culling. + """ + cache_key = self.quote_name("cache_key") + return f"SELECT {cache_key} FROM %s ORDER BY {cache_key} LIMIT 1 OFFSET %%s" + + def unification_cast_sql(self, output_field): + """ + Given a field instance, return the SQL that casts the result of a union + to that type. The resulting string should contain a '%s' placeholder + for the expression being cast. + """ + return "%s" + + def date_extract_sql(self, lookup_type, sql, params): + """ + Given a lookup_type of 'year', 'month', or 'day', return the SQL that + extracts a value from the given date field field_name. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a date_extract_sql() " + "method" + ) + + def date_trunc_sql(self, lookup_type, sql, params, tzname=None): + """ + Given a lookup_type of 'year', 'month', or 'day', return the SQL that + truncates the given date or datetime field field_name to a date object + with only the given specificity. + + If `tzname` is provided, the given value is truncated in a specific + timezone. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a date_trunc_sql() " + "method." + ) + + def datetime_cast_date_sql(self, sql, params, tzname): + """ + Return the SQL to cast a datetime value to date value. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a " + "datetime_cast_date_sql() method." + ) + + def datetime_cast_time_sql(self, sql, params, tzname): + """ + Return the SQL to cast a datetime value to time value. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a " + "datetime_cast_time_sql() method" + ) + + def datetime_extract_sql(self, lookup_type, sql, params, tzname): + """ + Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or + 'second', return the SQL that extracts a value from the given + datetime field field_name. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a datetime_extract_sql() " + "method" + ) + + def datetime_trunc_sql(self, lookup_type, sql, params, tzname): + """ + Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or + 'second', return the SQL that truncates the given datetime field + field_name to a datetime object with only the given specificity. 
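A hedged, PostgreSQL-flavored sketch of an override (tzname handling elided for brevity; passing the truncation field as a text parameter is valid there, but the exact spelling is backend-specific):

```python
def datetime_trunc_sql(self, lookup_type, sql, params, tzname):
    return f"DATE_TRUNC(%s, {sql})", (lookup_type, *params)
```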
+ """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() " + "method" + ) + + def time_trunc_sql(self, lookup_type, sql, params, tzname=None): + """ + Given a lookup_type of 'hour', 'minute' or 'second', return the SQL + that truncates the given time or datetime field field_name to a time + object with only the given specificity. + + If `tzname` is provided, the given value is truncated in a specific + timezone. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a time_trunc_sql() method" + ) + + def time_extract_sql(self, lookup_type, sql, params): + """ + Given a lookup_type of 'hour', 'minute', or 'second', return the SQL + that extracts a value from the given time field field_name. + """ + return self.date_extract_sql(lookup_type, sql, params) + + def deferrable_sql(self): + """ + Return the SQL to make a constraint "initially deferred" during a + CREATE TABLE statement. + """ + return "" + + def distinct_sql(self, fields, params): + """ + Return an SQL DISTINCT clause which removes duplicate rows from the + result set. If any fields are given, only check the given fields for + duplicates. + """ + if fields: + raise NotSupportedError( + "DISTINCT ON fields is not supported by this database backend" + ) + else: + return ["DISTINCT"], [] + + def fetch_returned_insert_columns(self, cursor, returning_params): + """ + Given a cursor object that has just performed an INSERT...RETURNING + statement into a table, return the newly created data. + """ + return cursor.fetchone() + + def field_cast_sql(self, db_type, internal_type): + """ + Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type + (e.g. 'GenericIPAddressField'), return the SQL to cast it before using + it in a WHERE statement. The resulting string should contain a '%s' + placeholder for the column being searched against. + """ + return "%s" + + def force_no_ordering(self): + """ + Return a list used in the "ORDER BY" clause to force no ordering at + all. Return an empty list to include nothing in the ordering. + """ + return [] + + def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False): + """ + Return the FOR UPDATE SQL clause to lock rows for an update operation. + """ + return "FOR%s UPDATE%s%s%s" % ( + " NO KEY" if no_key else "", + " OF %s" % ", ".join(of) if of else "", + " NOWAIT" if nowait else "", + " SKIP LOCKED" if skip_locked else "", + ) + + def _get_limit_offset_params(self, low_mark, high_mark): + offset = low_mark or 0 + if high_mark is not None: + return (high_mark - offset), offset + elif offset: + return self.connection.ops.no_limit_value(), offset + return None, offset + + def limit_offset_sql(self, low_mark, high_mark): + """Return LIMIT/OFFSET SQL clause.""" + limit, offset = self._get_limit_offset_params(low_mark, high_mark) + return " ".join( + sql + for sql in ( + ("LIMIT %d" % limit) if limit else None, + ("OFFSET %d" % offset) if offset else None, + ) + if sql + ) + + def last_executed_query(self, cursor, sql, params): + """ + Return a string of the query last executed by the given cursor, with + placeholders replaced with actual values. + + `sql` is the raw query containing placeholders and `params` is the + sequence of parameters. These are used by default, but this method + exists for database backends to provide a better implementation + according to their own quoting schemes. + """ + + # Convert params to contain string values. 
+ def to_string(s): + return force_str(s, strings_only=True, errors="replace") + + if isinstance(params, (list, tuple)): + u_params = tuple(to_string(val) for val in params) + elif params is None: + u_params = () + else: + u_params = {to_string(k): to_string(v) for k, v in params.items()} + + return "QUERY = %r - PARAMS = %r" % (sql, u_params) + + def last_insert_id(self, cursor, table_name, pk_name): + """ + Given a cursor object that has just performed an INSERT statement into + a table that has an auto-incrementing ID, return the newly created ID. + + `pk_name` is the name of the primary-key column. + """ + return cursor.lastrowid + + def lookup_cast(self, lookup_type, internal_type=None): + """ + Return the string to use in a query when performing lookups + ("contains", "like", etc.). It should contain a '%s' placeholder for + the column being searched against. + """ + return "%s" + + def max_in_list_size(self): + """ + Return the maximum number of items that can be passed in a single 'IN' + list condition, or None if the backend does not impose a limit. + """ + return None + + def max_name_length(self): + """ + Return the maximum length of table and column names, or None if there + is no limit. + """ + return None + + def no_limit_value(self): + """ + Return the value to use for the LIMIT when we are wanting "LIMIT + infinity". Return None if the limit clause can be omitted in this case. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a no_limit_value() method" + ) + + def pk_default_value(self): + """ + Return the value to use during an INSERT statement to specify that + the field should use its default value. + """ + return "DEFAULT" + + def prepare_sql_script(self, sql): + """ + Take an SQL script that may contain multiple lines and return a list + of statements to feed to successive cursor.execute() calls. + + Since few databases are able to process raw SQL scripts in a single + cursor.execute() call and PEP 249 doesn't talk about this use case, + the default implementation is conservative. + """ + return [ + sqlparse.format(statement, strip_comments=True) + for statement in sqlparse.split(sql) + if statement + ] + + def process_clob(self, value): + """ + Return the value of a CLOB column, for backends that return a locator + object that requires additional processing. + """ + return value + + def return_insert_columns(self, fields): + """ + For backends that support returning columns as part of an insert query, + return the SQL and params to append to the INSERT query. The returned + fragment should contain a format string to hold the appropriate column. + """ + pass + + def compiler(self, compiler_name): + """ + Return the SQLCompiler class corresponding to the given name, + in the namespace corresponding to the `compiler_module` attribute + on this backend. + """ + if self._cache is None: + self._cache = import_module(self.compiler_module) + return getattr(self._cache, compiler_name) + + def quote_name(self, name): + """ + Return a quoted version of the given table, index, or column name. Do + not quote the given name if it's already been quoted. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a quote_name() method" + ) + + def regex_lookup(self, lookup_type): + """ + Return the string to use in a query when performing regular expression + lookups (using "regex" or "iregex"). It should contain a '%s' + placeholder for the column being searched against. 
+ + If the feature is not supported (or part of it is not supported), raise + NotImplementedError. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations may require a regex_lookup() method" + ) + + def savepoint_create_sql(self, sid): + """ + Return the SQL for starting a new savepoint. Only required if the + "uses_savepoints" feature is True. The "sid" parameter is a string + for the savepoint id. + """ + return "SAVEPOINT %s" % self.quote_name(sid) + + def savepoint_commit_sql(self, sid): + """ + Return the SQL for committing the given savepoint. + """ + return "RELEASE SAVEPOINT %s" % self.quote_name(sid) + + def savepoint_rollback_sql(self, sid): + """ + Return the SQL for rolling back the given savepoint. + """ + return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid) + + def set_time_zone_sql(self): + """ + Return the SQL that will set the connection's time zone. + + Return '' if the backend doesn't support time zones. + """ + return "" + + def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): + """ + Return a list of SQL statements required to remove all data from + the given database tables (without actually removing the tables + themselves). + + The `style` argument is a Style object as returned by either + color_style() or no_style() in django.core.management.color. + + If `reset_sequences` is True, the list includes SQL statements required + to reset the sequences. + + The `allow_cascade` argument determines whether truncation may cascade + to tables with foreign keys pointing the tables being truncated. + PostgreSQL requires a cascade even if these tables are empty. + """ + raise NotImplementedError( + "subclasses of BaseDatabaseOperations must provide an sql_flush() method" + ) + + def execute_sql_flush(self, sql_list): + """Execute a list of SQL statements to flush the database.""" + with transaction.atomic( + using=self.connection.alias, + savepoint=self.connection.features.can_rollback_ddl, + ): + with self.connection.cursor() as cursor: + for sql in sql_list: + cursor.execute(sql) + + def sequence_reset_by_name_sql(self, style, sequences): + """ + Return a list of the SQL statements required to reset sequences + passed in `sequences`. + + The `style` argument is a Style object as returned by either + color_style() or no_style() in django.core.management.color. + """ + return [] + + def sequence_reset_sql(self, style, model_list): + """ + Return a list of the SQL statements required to reset sequences for + the given models. + + The `style` argument is a Style object as returned by either + color_style() or no_style() in django.core.management.color. + """ + return [] # No sequence reset required by default. + + def start_transaction_sql(self): + """Return the SQL statement required to start a transaction.""" + return "BEGIN;" + + def end_transaction_sql(self, success=True): + """Return the SQL statement required to end a transaction.""" + if not success: + return "ROLLBACK;" + return "COMMIT;" + + def tablespace_sql(self, tablespace, inline=False): + """ + Return the SQL that will be used in a query to define the tablespace. + + Return '' if the backend doesn't support tablespaces. + + If `inline` is True, append the SQL to a row; otherwise append it to + the entire CREATE TABLE or CREATE INDEX statement. 
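The savepoint hooks above compose plain SQL around a quoted id; assuming a backend whose quote_name() double-quotes identifiers:

```python
ops.savepoint_create_sql("s1")    # -> 'SAVEPOINT "s1"'
ops.savepoint_commit_sql("s1")    # -> 'RELEASE SAVEPOINT "s1"'
ops.savepoint_rollback_sql("s1")  # -> 'ROLLBACK TO SAVEPOINT "s1"'
```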
+ """ + return "" + + def prep_for_like_query(self, x): + """Prepare a value for use in a LIKE query.""" + return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_") + + # Same as prep_for_like_query(), but called for "iexact" matches, which + # need not necessarily be implemented using "LIKE" in the backend. + prep_for_iexact_query = prep_for_like_query + + def validate_autopk_value(self, value): + """ + Certain backends do not accept some values for "serial" fields + (for example zero in MySQL). Raise a ValueError if the value is + invalid, otherwise return the validated value. + """ + return value + + def adapt_unknown_value(self, value): + """ + Transform a value to something compatible with the backend driver. + + This method only depends on the type of the value. It's designed for + cases where the target type isn't known, such as .raw() SQL queries. + As a consequence it may not work perfectly in all circumstances. + """ + if isinstance(value, datetime.datetime): # must be before date + return self.adapt_datetimefield_value(value) + elif isinstance(value, datetime.date): + return self.adapt_datefield_value(value) + elif isinstance(value, datetime.time): + return self.adapt_timefield_value(value) + elif isinstance(value, decimal.Decimal): + return self.adapt_decimalfield_value(value) + else: + return value + + def adapt_integerfield_value(self, value, internal_type): + return value + + def adapt_datefield_value(self, value): + """ + Transform a date value to an object compatible with what is expected + by the backend driver for date columns. + """ + if value is None: + return None + return str(value) + + def adapt_datetimefield_value(self, value): + """ + Transform a datetime value to an object compatible with what is expected + by the backend driver for datetime columns. + """ + if value is None: + return None + # Expression values are adapted by the database. + if hasattr(value, "resolve_expression"): + return value + + return str(value) + + def adapt_timefield_value(self, value): + """ + Transform a time value to an object compatible with what is expected + by the backend driver for time columns. + """ + if value is None: + return None + # Expression values are adapted by the database. + if hasattr(value, "resolve_expression"): + return value + + if timezone.is_aware(value): + raise ValueError("Django does not support timezone-aware times.") + return str(value) + + def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None): + """ + Transform a decimal.Decimal value to an object compatible with what is + expected by the backend driver for decimal (numeric) columns. + """ + return utils.format_number(value, max_digits, decimal_places) + + def adapt_ipaddressfield_value(self, value): + """ + Transform a string representation of an IP address into the expected + type for the backend driver. + """ + return value or None + + def adapt_json_value(self, value, encoder): + return json.dumps(value, cls=encoder) + + def year_lookup_bounds_for_date_field(self, value, iso_year=False): + """ + Return a two-elements list with the lower and upper bound to be used + with a BETWEEN operator to query a DateField value using a year + lookup. + + `value` is an int, containing the looked-up year. + If `iso_year` is True, return bounds for ISO-8601 week-numbering years. 
+ """ + if iso_year: + first = datetime.date.fromisocalendar(value, 1, 1) + second = datetime.date.fromisocalendar( + value + 1, 1, 1 + ) - datetime.timedelta(days=1) + else: + first = datetime.date(value, 1, 1) + second = datetime.date(value, 12, 31) + first = self.adapt_datefield_value(first) + second = self.adapt_datefield_value(second) + return [first, second] + + def year_lookup_bounds_for_datetime_field(self, value, iso_year=False): + """ + Return a two-elements list with the lower and upper bound to be used + with a BETWEEN operator to query a DateTimeField value using a year + lookup. + + `value` is an int, containing the looked-up year. + If `iso_year` is True, return bounds for ISO-8601 week-numbering years. + """ + if iso_year: + first = datetime.datetime.fromisocalendar(value, 1, 1) + second = datetime.datetime.fromisocalendar( + value + 1, 1, 1 + ) - datetime.timedelta(microseconds=1) + else: + first = datetime.datetime(value, 1, 1) + second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999) + if settings.USE_TZ: + tz = timezone.get_current_timezone() + first = timezone.make_aware(first, tz) + second = timezone.make_aware(second, tz) + first = self.adapt_datetimefield_value(first) + second = self.adapt_datetimefield_value(second) + return [first, second] + + def get_db_converters(self, expression): + """ + Return a list of functions needed to convert field data. + + Some field types on some backends do not provide data in the correct + format, this is the hook for converter functions. + """ + return [] + + def convert_durationfield_value(self, value, expression, connection): + if value is not None: + return datetime.timedelta(0, 0, value) + + def check_expression_support(self, expression): + """ + Check that the backend supports the provided expression. + + This is used on specific backends to rule out known expressions + that have problematic or nonexistent implementations. If the + expression has a known problem, the backend should raise + NotSupportedError. + """ + pass + + def conditional_expression_supported_in_where_clause(self, expression): + """ + Return True, if the conditional expression is supported in the WHERE + clause. + """ + return True + + def combine_expression(self, connector, sub_expressions): + """ + Combine a list of subexpressions into a single expression, using + the provided connecting operator. This is required because operators + can vary between backends (e.g., Oracle with %% and &) and between + subexpression types (e.g., date expressions). + """ + conn = " %s " % connector + return conn.join(sub_expressions) + + def combine_duration_expression(self, connector, sub_expressions): + return self.combine_expression(connector, sub_expressions) + + def binary_placeholder_sql(self, value): + """ + Some backends require special syntax to insert binary content (MySQL + for example uses '_binary %s'). + """ + return "%s" + + def modify_insert_params(self, placeholder, params): + """ + Allow modification of insert parameters. Needed for Oracle Spatial + backend due to #10888. + """ + return params + + def integer_field_range(self, internal_type): + """ + Given an integer field internal type (e.g. 'PositiveIntegerField'), + return a tuple of the (min_value, max_value) form representing the + range of the column type bound to the field. 
+ """ + return self.integer_field_ranges[internal_type] + + def subtract_temporals(self, internal_type, lhs, rhs): + if self.connection.features.supports_temporal_subtraction: + lhs_sql, lhs_params = lhs + rhs_sql, rhs_params = rhs + return "(%s - %s)" % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params) + raise NotSupportedError( + "This backend does not support %s subtraction." % internal_type + ) + + def window_frame_start(self, start): + if isinstance(start, int): + if start < 0: + return "%d %s" % (abs(start), self.PRECEDING) + elif start == 0: + return self.CURRENT_ROW + elif start is None: + return self.UNBOUNDED_PRECEDING + raise ValueError( + "start argument must be a negative integer, zero, or None, but got '%s'." + % start + ) + + def window_frame_end(self, end): + if isinstance(end, int): + if end == 0: + return self.CURRENT_ROW + elif end > 0: + return "%d %s" % (end, self.FOLLOWING) + elif end is None: + return self.UNBOUNDED_FOLLOWING + raise ValueError( + "end argument must be a positive integer, zero, or None, but got '%s'." + % end + ) + + def window_frame_rows_start_end(self, start=None, end=None): + """ + Return SQL for start and end points in an OVER clause window frame. + """ + if not self.connection.features.supports_over_clause: + raise NotSupportedError("This backend does not support window expressions.") + return self.window_frame_start(start), self.window_frame_end(end) + + def window_frame_range_start_end(self, start=None, end=None): + start_, end_ = self.window_frame_rows_start_end(start, end) + features = self.connection.features + if features.only_supports_unbounded_with_preceding_and_following and ( + (start and start < 0) or (end and end > 0) + ): + raise NotSupportedError( + "%s only supports UNBOUNDED together with PRECEDING and " + "FOLLOWING." % self.connection.display_name + ) + return start_, end_ + + def explain_query_prefix(self, format=None, **options): + if not self.connection.features.supports_explaining_query_execution: + raise NotSupportedError( + "This backend does not support explaining query execution." + ) + if format: + supported_formats = self.connection.features.supported_explain_formats + normalized_format = format.upper() + if normalized_format not in supported_formats: + msg = "%s is not a recognized format." % normalized_format + if supported_formats: + msg += " Allowed formats: %s" % ", ".join(sorted(supported_formats)) + else: + msg += ( + f" {self.connection.display_name} does not support any formats." 
+ ) + raise ValueError(msg) + if options: + raise ValueError("Unknown options: %s" % ", ".join(sorted(options.keys()))) + return self.explain_prefix + + def insert_statement(self, on_conflict=None): + return "INSERT INTO" + + def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields): + return "" + + def prepare_join_on_clause(self, lhs_table, lhs_field, rhs_table, rhs_field): + lhs_expr = Col(lhs_table, lhs_field) + rhs_expr = Col(rhs_table, rhs_field) + + return lhs_expr, rhs_expr diff --git a/testbed/django__django/django/db/backends/base/schema.py b/testbed/django__django/django/db/backends/base/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..2742748049dcc07b7bdba3f69c27372251be6cbc --- /dev/null +++ b/testbed/django__django/django/db/backends/base/schema.py @@ -0,0 +1,1919 @@ +import logging +import operator +from datetime import datetime + +from django.conf import settings +from django.db.backends.ddl_references import ( + Columns, + Expressions, + ForeignKeyName, + IndexName, + Statement, + Table, +) +from django.db.backends.utils import names_digest, split_identifier, truncate_name +from django.db.models import NOT_PROVIDED, Deferrable, Index +from django.db.models.sql import Query +from django.db.transaction import TransactionManagementError, atomic +from django.utils import timezone + +logger = logging.getLogger("django.db.backends.schema") + + +def _is_relevant_relation(relation, altered_field): + """ + When altering the given field, must constraints on its model from the given + relation be temporarily dropped? + """ + field = relation.field + if field.many_to_many: + # M2M reverse field + return False + if altered_field.primary_key and field.to_fields == [None]: + # Foreign key constraint on the primary key, which is being altered. + return True + # Is the constraint targeting the field being altered? + return altered_field.name in field.to_fields + + +def _all_related_fields(model): + # Related fields must be returned in a deterministic order. + return sorted( + model._meta._get_fields( + forward=False, + reverse=True, + include_hidden=True, + include_parents=False, + ), + key=operator.attrgetter("name"), + ) + + +def _related_non_m2m_objects(old_field, new_field): + # Filter out m2m objects from reverse relations. + # Return (old_relation, new_relation) tuples. + related_fields = zip( + ( + obj + for obj in _all_related_fields(old_field.model) + if _is_relevant_relation(obj, old_field) + ), + ( + obj + for obj in _all_related_fields(new_field.model) + if _is_relevant_relation(obj, new_field) + ), + ) + for old_rel, new_rel in related_fields: + yield old_rel, new_rel + yield from _related_non_m2m_objects( + old_rel.remote_field, + new_rel.remote_field, + ) + + +class BaseDatabaseSchemaEditor: + """ + This class and its subclasses are responsible for emitting schema-changing + statements to the databases - model creation/removal/alteration, field + renaming, index fiddling, and so on. 
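Backends tailor their DDL mostly by overriding the SQL templates that follow; a hedged sketch of the pattern (SQLite, for instance, has no CASCADE on DROP TABLE):

```python
class FakeSchemaEditor(BaseDatabaseSchemaEditor):   # hypothetical backend
    sql_delete_table = "DROP TABLE %(table)s"       # base default appends CASCADE
```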
+ """ + + # Overrideable SQL templates + sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" + sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" + sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" + sql_delete_table = "DROP TABLE %(table)s CASCADE" + + sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" + sql_alter_column = "ALTER TABLE %(table)s %(changes)s" + sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" + sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" + sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" + sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" + sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" + sql_alter_column_no_default_null = sql_alter_column_no_default + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" + sql_rename_column = ( + "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" + ) + sql_update_with_default = ( + "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" + ) + + sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s" + sql_check_constraint = "CHECK (%(check)s)" + sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + sql_constraint = "CONSTRAINT %(name)s %(constraint)s" + + sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" + sql_delete_check = sql_delete_constraint + + sql_create_unique = ( + "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s " + "UNIQUE (%(columns)s)%(deferrable)s" + ) + sql_delete_unique = sql_delete_constraint + + sql_create_fk = ( + "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " + "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s" + ) + sql_create_inline_fk = None + sql_create_column_inline_fk = None + sql_delete_fk = sql_delete_constraint + + sql_create_index = ( + "CREATE INDEX %(name)s ON %(table)s " + "(%(columns)s)%(include)s%(extra)s%(condition)s" + ) + sql_create_unique_index = ( + "CREATE UNIQUE INDEX %(name)s ON %(table)s " + "(%(columns)s)%(include)s%(condition)s" + ) + sql_rename_index = "ALTER INDEX %(old_name)s RENAME TO %(new_name)s" + sql_delete_index = "DROP INDEX %(name)s" + + sql_create_pk = ( + "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + ) + sql_delete_pk = sql_delete_constraint + + sql_delete_procedure = "DROP PROCEDURE %(procedure)s" + + sql_alter_table_comment = "COMMENT ON TABLE %(table)s IS %(comment)s" + sql_alter_column_comment = "COMMENT ON COLUMN %(table)s.%(column)s IS %(comment)s" + + def __init__(self, connection, collect_sql=False, atomic=True): + self.connection = connection + self.collect_sql = collect_sql + if self.collect_sql: + self.collected_sql = [] + self.atomic_migration = self.connection.features.can_rollback_ddl and atomic + + # State-managing methods + + def __enter__(self): + self.deferred_sql = [] + if self.atomic_migration: + self.atomic = atomic(self.connection.alias) + self.atomic.__enter__() + return self + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + for sql in self.deferred_sql: + self.execute(sql) + if self.atomic_migration: + self.atomic.__exit__(exc_type, exc_value, traceback) + + # Core utility functions + + def execute(self, sql, params=()): + """Execute the given SQL statement, with optional parameters.""" + # Don't perform the transactional DDL check if SQL is being 
collected + # as it's not going to be executed anyway. + if ( + not self.collect_sql + and self.connection.in_atomic_block + and not self.connection.features.can_rollback_ddl + ): + raise TransactionManagementError( + "Executing DDL statements while in a transaction on databases " + "that can't perform a rollback is prohibited." + ) + # Account for non-string statement objects. + sql = str(sql) + # Log the command we're running, then run it + logger.debug( + "%s; (params %r)", sql, params, extra={"params": params, "sql": sql} + ) + if self.collect_sql: + ending = "" if sql.rstrip().endswith(";") else ";" + if params is not None: + self.collected_sql.append( + (sql % tuple(map(self.quote_value, params))) + ending + ) + else: + self.collected_sql.append(sql + ending) + else: + with self.connection.cursor() as cursor: + cursor.execute(sql, params) + + def quote_name(self, name): + return self.connection.ops.quote_name(name) + + def table_sql(self, model): + """Take a model and return its table definition.""" + # Add any unique_togethers (always deferred, as some fields might be + # created afterward, like geometry fields with some backends). + for field_names in model._meta.unique_together: + fields = [model._meta.get_field(field) for field in field_names] + self.deferred_sql.append(self._create_unique_sql(model, fields)) + # Create column SQL, add FK deferreds if needed. + column_sqls = [] + params = [] + for field in model._meta.local_fields: + # SQL. + definition, extra_params = self.column_sql(model, field) + if definition is None: + continue + # Check constraints can go on the column SQL here. + db_params = field.db_parameters(connection=self.connection) + if db_params["check"]: + definition += " " + self.sql_check_constraint % db_params + # Autoincrement SQL (for backends with inline variant). + col_type_suffix = field.db_type_suffix(connection=self.connection) + if col_type_suffix: + definition += " %s" % col_type_suffix + params.extend(extra_params) + # FK. + if field.remote_field and field.db_constraint: + to_table = field.remote_field.model._meta.db_table + to_column = field.remote_field.model._meta.get_field( + field.remote_field.field_name + ).column + if self.sql_create_inline_fk: + definition += " " + self.sql_create_inline_fk % { + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + elif self.connection.features.supports_foreign_keys: + self.deferred_sql.append( + self._create_fk_sql( + model, field, "_fk_%(to_table)s_%(to_column)s" + ) + ) + # Add the SQL to our big list. + column_sqls.append( + "%s %s" + % ( + self.quote_name(field.column), + definition, + ) + ) + # Autoincrement SQL (for backends with post table definition + # variant). 
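+            # connection.ops.autoinc_sql() returns None in the base backend;
+            # backends that emulate autoincrement with follow-up statements
+            # (historically, Oracle created a sequence plus a trigger) return
+            # a list of SQL strings here, which is why it is deferred rather
+            # than inlined into the column definition.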
+ if field.get_internal_type() in ( + "AutoField", + "BigAutoField", + "SmallAutoField", + ): + autoinc_sql = self.connection.ops.autoinc_sql( + model._meta.db_table, field.column + ) + if autoinc_sql: + self.deferred_sql.extend(autoinc_sql) + constraints = [ + constraint.constraint_sql(model, self) + for constraint in model._meta.constraints + ] + sql = self.sql_create_table % { + "table": self.quote_name(model._meta.db_table), + "definition": ", ".join( + str(constraint) + for constraint in (*column_sqls, *constraints) + if constraint + ), + } + if model._meta.db_tablespace: + tablespace_sql = self.connection.ops.tablespace_sql( + model._meta.db_tablespace + ) + if tablespace_sql: + sql += " " + tablespace_sql + return sql, params + + # Field <-> database mapping functions + + def _iter_column_sql( + self, column_db_type, params, model, field, field_db_params, include_default + ): + yield column_db_type + if collation := field_db_params.get("collation"): + yield self._collate_sql(collation) + if self.connection.features.supports_comments_inline and field.db_comment: + yield self._comment_sql(field.db_comment) + # Work out nullability. + null = field.null + # Add database default. + if field.db_default is not NOT_PROVIDED: + default_sql, default_params = self.db_default_sql(field) + yield f"DEFAULT {default_sql}" + params.extend(default_params) + include_default = False + # Include a default value, if requested. + include_default = ( + include_default + and not self.skip_default(field) + and + # Don't include a default value if it's a nullable field and the + # default cannot be dropped in the ALTER COLUMN statement (e.g. + # MySQL longtext and longblob). + not (null and self.skip_default_on_alter(field)) + ) + if include_default: + default_value = self.effective_default(field) + if default_value is not None: + column_default = "DEFAULT " + self._column_default_sql(field) + if self.connection.features.requires_literal_defaults: + # Some databases can't take defaults as a parameter (Oracle). + # If this is the case, the individual schema backend should + # implement prepare_default(). + yield column_default % self.prepare_default(default_value) + else: + yield column_default + params.append(default_value) + # Oracle treats the empty string ('') as null, so coerce the null + # option whenever '' is a possible value. + if ( + field.empty_strings_allowed + and not field.primary_key + and self.connection.features.interprets_empty_strings_as_nulls + ): + null = True + if not null: + yield "NOT NULL" + elif not self.connection.features.implied_column_null: + yield "NULL" + if field.primary_key: + yield "PRIMARY KEY" + elif field.unique: + yield "UNIQUE" + # Optionally add the tablespace if it's an implicitly indexed column. + tablespace = field.db_tablespace or model._meta.db_tablespace + if ( + tablespace + and self.connection.features.supports_tablespaces + and field.unique + ): + yield self.connection.ops.tablespace_sql(tablespace, inline=True) + + def column_sql(self, model, field, include_default=False): + """ + Return the column definition for a field. The field must already have + had set_attributes_from_name() called. + """ + # Get the column's type and use that as the basis of the SQL. + field_db_params = field.db_parameters(connection=self.connection) + column_db_type = field_db_params["type"] + # Check for fields that aren't actually columns (e.g. M2M). + if column_db_type is None: + return None, None + params = [] + return ( + " ".join( + # This appends to the params being returned. 
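+                # (_iter_column_sql() is a generator: consuming it via
+                # " ".join() builds the column definition and, as a side
+                # effect, extends `params` in place with any default values.)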
+                self._iter_column_sql(
+                    column_db_type,
+                    params,
+                    model,
+                    field,
+                    field_db_params,
+                    include_default,
+                )
+            ),
+            params,
+        )
+
+    def skip_default(self, field):
+        """
+        Some backends don't accept default values for certain column types
+        (e.g. MySQL longtext and longblob).
+        """
+        return False
+
+    def skip_default_on_alter(self, field):
+        """
+        Some backends don't accept default values for certain column types
+        (e.g. MySQL longtext and longblob) in the ALTER COLUMN statement.
+        """
+        return False
+
+    def prepare_default(self, value):
+        """
+        Only used by backends that have the requires_literal_defaults feature.
+        """
+        raise NotImplementedError(
+            "subclasses of BaseDatabaseSchemaEditor for backends which have "
+            "requires_literal_defaults must provide a prepare_default() method"
+        )
+
+    def _column_default_sql(self, field):
+        """
+        Return the SQL to use in a DEFAULT clause. The resulting string should
+        contain a '%s' placeholder for a default value.
+        """
+        return "%s"
+
+    def db_default_sql(self, field):
+        """Return the SQL and params for the field's database default."""
+        from django.db.models.expressions import Value
+
+        sql = "%s" if isinstance(field.db_default, Value) else "(%s)"
+        query = Query(model=field.model)
+        compiler = query.get_compiler(connection=self.connection)
+        default_sql, params = compiler.compile(field.db_default)
+        if self.connection.features.requires_literal_defaults:
+            # Some databases don't support parameterized defaults (Oracle,
+            # SQLite). If this is the case, the individual schema backend
+            # should implement prepare_default().
+            default_sql %= tuple(self.prepare_default(p) for p in params)
+            params = []
+        return sql % default_sql, params
+
+    @staticmethod
+    def _effective_default(field):
+        # This method allows testing its logic without a connection.
+        if field.has_default():
+            default = field.get_default()
+        elif not field.null and field.blank and field.empty_strings_allowed:
+            if field.get_internal_type() == "BinaryField":
+                default = b""
+            else:
+                default = ""
+        elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False):
+            internal_type = field.get_internal_type()
+            if internal_type == "DateTimeField":
+                default = timezone.now()
+            else:
+                default = datetime.now()
+                if internal_type == "DateField":
+                    default = default.date()
+                elif internal_type == "TimeField":
+                    default = default.time()
+        else:
+            default = None
+        return default
+
+    def effective_default(self, field):
+        """Return a field's effective database default value."""
+        return field.get_db_prep_save(self._effective_default(field), self.connection)
+
+    def quote_value(self, value):
+        """
+        Return a quoted version of the value so it's safe to use in an SQL
+        string. This is not safe against injection from user code; it is
+        intended only for use in making SQL scripts or preparing default values
+        for particularly tricky backends (defaults are not user-defined, though,
+        so this is safe).
+        """
+        raise NotImplementedError()
+
+    # Actions
+
+    def create_model(self, model):
+        """
+        Create a table and any accompanying indexes or unique constraints for
+        the given `model`.
+        """
+        sql, params = self.table_sql(model)
+        # Prevent using [] as params, in case a literal '%' is used in the
+        # definition.
+        self.execute(sql, params or None)
+
+        if self.connection.features.supports_comments:
+            # Add table comment.
+            if model._meta.db_table_comment:
+                self.alter_db_table_comment(model, None, model._meta.db_table_comment)
+            # Add column comments.
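+            # Backends that can embed comments in the column definition itself
+            # (supports_comments_inline) have already handled them via
+            # column_sql(); everything else gets a separate statement per
+            # column, e.g. COMMENT ON COLUMN "table"."col" IS 'text'.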
+ if not self.connection.features.supports_comments_inline: + for field in model._meta.local_fields: + if field.db_comment: + field_db_params = field.db_parameters( + connection=self.connection + ) + field_type = field_db_params["type"] + self.execute( + *self._alter_column_comment_sql( + model, field, field_type, field.db_comment + ) + ) + # Add any field index and index_together's (deferred as SQLite + # _remake_table needs it). + self.deferred_sql.extend(self._model_indexes_sql(model)) + + # Make M2M tables + for field in model._meta.local_many_to_many: + if field.remote_field.through._meta.auto_created: + self.create_model(field.remote_field.through) + + def delete_model(self, model): + """Delete a model from the database.""" + # Handle auto-created intermediary models + for field in model._meta.local_many_to_many: + if field.remote_field.through._meta.auto_created: + self.delete_model(field.remote_field.through) + + # Delete the table + self.execute( + self.sql_delete_table + % { + "table": self.quote_name(model._meta.db_table), + } + ) + # Remove all deferred statements referencing the deleted table. + for sql in list(self.deferred_sql): + if isinstance(sql, Statement) and sql.references_table( + model._meta.db_table + ): + self.deferred_sql.remove(sql) + + def add_index(self, model, index): + """Add an index on a model.""" + if ( + index.contains_expressions + and not self.connection.features.supports_expression_indexes + ): + return None + # Index.create_sql returns interpolated SQL which makes params=None a + # necessity to avoid escaping attempts on execution. + self.execute(index.create_sql(model, self), params=None) + + def remove_index(self, model, index): + """Remove an index from a model.""" + if ( + index.contains_expressions + and not self.connection.features.supports_expression_indexes + ): + return None + self.execute(index.remove_sql(model, self)) + + def rename_index(self, model, old_index, new_index): + if self.connection.features.can_rename_index: + self.execute( + self._rename_index_sql(model, old_index.name, new_index.name), + params=None, + ) + else: + self.remove_index(model, old_index) + self.add_index(model, new_index) + + def add_constraint(self, model, constraint): + """Add a constraint to a model.""" + sql = constraint.create_sql(model, self) + if sql: + # Constraint.create_sql returns interpolated SQL which makes + # params=None a necessity to avoid escaping attempts on execution. + self.execute(sql, params=None) + + def remove_constraint(self, model, constraint): + """Remove a constraint from a model.""" + sql = constraint.remove_sql(model, self) + if sql: + self.execute(sql) + + def alter_unique_together(self, model, old_unique_together, new_unique_together): + """ + Deal with a model changing its unique_together. The input + unique_togethers must be doubly-nested, not the single-nested + ["foo", "bar"] format. + """ + olds = {tuple(fields) for fields in old_unique_together} + news = {tuple(fields) for fields in new_unique_together} + # Deleted uniques + for fields in olds.difference(news): + self._delete_composed_index( + model, + fields, + {"unique": True, "primary_key": False}, + self.sql_delete_unique, + ) + # Created uniques + for field_names in news.difference(olds): + fields = [model._meta.get_field(field) for field in field_names] + self.execute(self._create_unique_sql(model, fields)) + + def alter_index_together(self, model, old_index_together, new_index_together): + """ + Deal with a model changing its index_together. 
The input + index_togethers must be doubly-nested, not the single-nested + ["foo", "bar"] format. + """ + olds = {tuple(fields) for fields in old_index_together} + news = {tuple(fields) for fields in new_index_together} + # Deleted indexes + for fields in olds.difference(news): + self._delete_composed_index( + model, + fields, + {"index": True, "unique": False}, + self.sql_delete_index, + ) + # Created indexes + for field_names in news.difference(olds): + fields = [model._meta.get_field(field) for field in field_names] + self.execute(self._create_index_sql(model, fields=fields, suffix="_idx")) + + def _delete_composed_index(self, model, fields, constraint_kwargs, sql): + meta_constraint_names = { + constraint.name for constraint in model._meta.constraints + } + meta_index_names = {constraint.name for constraint in model._meta.indexes} + columns = [model._meta.get_field(field).column for field in fields] + constraint_names = self._constraint_names( + model, + columns, + exclude=meta_constraint_names | meta_index_names, + **constraint_kwargs, + ) + if ( + constraint_kwargs.get("unique") is True + and constraint_names + and self.connection.features.allows_multiple_constraints_on_same_fields + ): + # Constraint matching the unique_together name. + default_name = str( + self._unique_constraint_name(model._meta.db_table, columns, quote=False) + ) + if default_name in constraint_names: + constraint_names = [default_name] + if len(constraint_names) != 1: + raise ValueError( + "Found wrong number (%s) of constraints for %s(%s)" + % ( + len(constraint_names), + model._meta.db_table, + ", ".join(columns), + ) + ) + self.execute(self._delete_constraint_sql(sql, model, constraint_names[0])) + + def alter_db_table(self, model, old_db_table, new_db_table): + """Rename the table a model points to.""" + if old_db_table == new_db_table or ( + self.connection.features.ignores_table_name_case + and old_db_table.lower() == new_db_table.lower() + ): + return + self.execute( + self.sql_rename_table + % { + "old_table": self.quote_name(old_db_table), + "new_table": self.quote_name(new_db_table), + } + ) + # Rename all references to the old table name. + for sql in self.deferred_sql: + if isinstance(sql, Statement): + sql.rename_table_references(old_db_table, new_db_table) + + def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment): + self.execute( + self.sql_alter_table_comment + % { + "table": self.quote_name(model._meta.db_table), + "comment": self.quote_value(new_db_table_comment or ""), + } + ) + + def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): + """Move a model's table between tablespaces.""" + self.execute( + self.sql_retablespace_table + % { + "table": self.quote_name(model._meta.db_table), + "old_tablespace": self.quote_name(old_db_tablespace), + "new_tablespace": self.quote_name(new_db_tablespace), + } + ) + + def add_field(self, model, field): + """ + Create a field on a model. Usually involves adding a column, but may + involve adding a table instead (for M2M fields). 
+ """ + # Special-case implicit M2M tables + if field.many_to_many and field.remote_field.through._meta.auto_created: + return self.create_model(field.remote_field.through) + # Get the column's definition + definition, params = self.column_sql(model, field, include_default=True) + # It might not actually have a column behind it + if definition is None: + return + if col_type_suffix := field.db_type_suffix(connection=self.connection): + definition += f" {col_type_suffix}" + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=self.connection) + if db_params["check"]: + definition += " " + self.sql_check_constraint % db_params + if ( + field.remote_field + and self.connection.features.supports_foreign_keys + and field.db_constraint + ): + constraint_suffix = "_fk_%(to_table)s_%(to_column)s" + # Add FK constraint inline, if supported. + if self.sql_create_column_inline_fk: + to_table = field.remote_field.model._meta.db_table + to_column = field.remote_field.model._meta.get_field( + field.remote_field.field_name + ).column + namespace, _ = split_identifier(model._meta.db_table) + definition += " " + self.sql_create_column_inline_fk % { + "name": self._fk_constraint_name(model, field, constraint_suffix), + "namespace": "%s." % self.quote_name(namespace) + if namespace + else "", + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + "deferrable": self.connection.ops.deferrable_sql(), + } + # Otherwise, add FK constraints later. + else: + self.deferred_sql.append( + self._create_fk_sql(model, field, constraint_suffix) + ) + # Build the SQL and run it + sql = self.sql_create_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "definition": definition, + } + self.execute(sql, params) + # Drop the default if we need to + # (Django usually does not use in-database defaults) + if ( + not self.skip_default_on_alter(field) + and self.effective_default(field) is not None + ): + changes_sql, params = self._alter_column_default_sql( + model, None, field, drop=True + ) + sql = self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": changes_sql, + } + self.execute(sql, params) + # Add field comment, if required. + if ( + field.db_comment + and self.connection.features.supports_comments + and not self.connection.features.supports_comments_inline + ): + field_type = db_params["type"] + self.execute( + *self._alter_column_comment_sql( + model, field, field_type, field.db_comment + ) + ) + # Add an index, if required + self.deferred_sql.extend(self._field_indexes_sql(model, field)) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + + def remove_field(self, model, field): + """ + Remove a field from a model. Usually involves deleting a column, + but for M2Ms may involve deleting a table. 
+ """ + # Special-case implicit M2M tables + if field.many_to_many and field.remote_field.through._meta.auto_created: + return self.delete_model(field.remote_field.through) + # It might not actually have a column behind it + if field.db_parameters(connection=self.connection)["type"] is None: + return + # Drop any FK constraints, MySQL requires explicit deletion + if field.remote_field: + fk_names = self._constraint_names(model, [field.column], foreign_key=True) + for fk_name in fk_names: + self.execute(self._delete_fk_sql(model, fk_name)) + # Delete the column + sql = self.sql_delete_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + } + self.execute(sql) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + # Remove all deferred statements referencing the deleted column. + for sql in list(self.deferred_sql): + if isinstance(sql, Statement) and sql.references_column( + model._meta.db_table, field.column + ): + self.deferred_sql.remove(sql) + + def alter_field(self, model, old_field, new_field, strict=False): + """ + Allow a field's type, uniqueness, nullability, default, column, + constraints, etc. to be modified. + `old_field` is required to compute the necessary changes. + If `strict` is True, raise errors if the old column does not match + `old_field` precisely. + """ + if not self._field_should_be_altered(old_field, new_field): + return + # Ensure this field is even column-based + old_db_params = old_field.db_parameters(connection=self.connection) + old_type = old_db_params["type"] + new_db_params = new_field.db_parameters(connection=self.connection) + new_type = new_db_params["type"] + if (old_type is None and old_field.remote_field is None) or ( + new_type is None and new_field.remote_field is None + ): + raise ValueError( + "Cannot alter field %s into %s - they do not properly define " + "db_type (are you using a badly-written custom field?)" + % (old_field, new_field), + ) + elif ( + old_type is None + and new_type is None + and ( + old_field.remote_field.through + and new_field.remote_field.through + and old_field.remote_field.through._meta.auto_created + and new_field.remote_field.through._meta.auto_created + ) + ): + return self._alter_many_to_many(model, old_field, new_field, strict) + elif ( + old_type is None + and new_type is None + and ( + old_field.remote_field.through + and new_field.remote_field.through + and not old_field.remote_field.through._meta.auto_created + and not new_field.remote_field.through._meta.auto_created + ) + ): + # Both sides have through models; this is a no-op. + return + elif old_type is None or new_type is None: + raise ValueError( + "Cannot alter field %s into %s - they are not compatible types " + "(you cannot alter to or from M2M fields, or add or remove " + "through= on M2M fields)" % (old_field, new_field) + ) + + self._alter_field( + model, + old_field, + new_field, + old_type, + new_type, + old_db_params, + new_db_params, + strict, + ) + + def _field_db_check(self, field, field_db_params): + # Always check constraints with the same mocked column name to avoid + # recreating constrains when the column is renamed. 
+ check_constraints = self.connection.data_type_check_constraints + data = field.db_type_parameters(self.connection) + data["column"] = "__column_name__" + try: + return check_constraints[field.get_internal_type()] % data + except KeyError: + return None + + def _alter_field( + self, + model, + old_field, + new_field, + old_type, + new_type, + old_db_params, + new_db_params, + strict=False, + ): + """Perform a "physical" (non-ManyToMany) field update.""" + # Drop any FK constraints, we'll remake them later + fks_dropped = set() + if ( + self.connection.features.supports_foreign_keys + and old_field.remote_field + and old_field.db_constraint + and self._field_should_be_altered( + old_field, + new_field, + ignore={"db_comment"}, + ) + ): + fk_names = self._constraint_names( + model, [old_field.column], foreign_key=True + ) + if strict and len(fk_names) != 1: + raise ValueError( + "Found wrong number (%s) of foreign key constraints for %s.%s" + % ( + len(fk_names), + model._meta.db_table, + old_field.column, + ) + ) + for fk_name in fk_names: + fks_dropped.add((old_field.column,)) + self.execute(self._delete_fk_sql(model, fk_name)) + # Has unique been removed? + if old_field.unique and ( + not new_field.unique or self._field_became_primary_key(old_field, new_field) + ): + # Find the unique constraint for this field + meta_constraint_names = { + constraint.name for constraint in model._meta.constraints + } + constraint_names = self._constraint_names( + model, + [old_field.column], + unique=True, + primary_key=False, + exclude=meta_constraint_names, + ) + if strict and len(constraint_names) != 1: + raise ValueError( + "Found wrong number (%s) of unique constraints for %s.%s" + % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + ) + ) + for constraint_name in constraint_names: + self.execute(self._delete_unique_sql(model, constraint_name)) + # Drop incoming FK constraints if the field is a primary key or unique, + # which might be a to_field target, and things are going to change. + old_collation = old_db_params.get("collation") + new_collation = new_db_params.get("collation") + drop_foreign_keys = ( + self.connection.features.supports_foreign_keys + and ( + (old_field.primary_key and new_field.primary_key) + or (old_field.unique and new_field.unique) + ) + and ((old_type != new_type) or (old_collation != new_collation)) + ) + if drop_foreign_keys: + # '_meta.related_field' also contains M2M reverse fields, these + # will be filtered out + for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): + rel_fk_names = self._constraint_names( + new_rel.related_model, [new_rel.field.column], foreign_key=True + ) + for fk_name in rel_fk_names: + self.execute(self._delete_fk_sql(new_rel.related_model, fk_name)) + # Removed an index? (no strict check, as multiple indexes are possible) + # Remove indexes if db_index switched to False or a unique constraint + # will now be used in lieu of an index. 
The following lines from the + # truth table show all True cases; the rest are False: + # + # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique + # ------------------------------------------------------------------------------ + # True | False | False | False + # True | False | False | True + # True | False | True | True + if ( + old_field.db_index + and not old_field.unique + and (not new_field.db_index or new_field.unique) + ): + # Find the index for this field + meta_index_names = {index.name for index in model._meta.indexes} + # Retrieve only BTREE indexes since this is what's created with + # db_index=True. + index_names = self._constraint_names( + model, + [old_field.column], + index=True, + type_=Index.suffix, + exclude=meta_index_names, + ) + for index_name in index_names: + # The only way to check if an index was created with + # db_index=True or with Index(['field'], name='foo') + # is to look at its name (refs #28053). + self.execute(self._delete_index_sql(model, index_name)) + # Change check constraints? + old_db_check = self._field_db_check(old_field, old_db_params) + new_db_check = self._field_db_check(new_field, new_db_params) + if old_db_check != new_db_check and old_db_check: + meta_constraint_names = { + constraint.name for constraint in model._meta.constraints + } + constraint_names = self._constraint_names( + model, + [old_field.column], + check=True, + exclude=meta_constraint_names, + ) + if strict and len(constraint_names) != 1: + raise ValueError( + "Found wrong number (%s) of check constraints for %s.%s" + % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + ) + ) + for constraint_name in constraint_names: + self.execute(self._delete_check_sql(model, constraint_name)) + # Have they renamed the column? + if old_field.column != new_field.column: + self.execute( + self._rename_field_sql( + model._meta.db_table, old_field, new_field, new_type + ) + ) + # Rename all references to the renamed column. + for sql in self.deferred_sql: + if isinstance(sql, Statement): + sql.rename_column_references( + model._meta.db_table, old_field.column, new_field.column + ) + # Next, start accumulating actions to do + actions = [] + null_actions = [] + post_actions = [] + # Type suffix change? (e.g. auto increment). + old_type_suffix = old_field.db_type_suffix(connection=self.connection) + new_type_suffix = new_field.db_type_suffix(connection=self.connection) + # Type, collation, or comment change? + if ( + old_type != new_type + or old_type_suffix != new_type_suffix + or old_collation != new_collation + or ( + self.connection.features.supports_comments + and old_field.db_comment != new_field.db_comment + ) + ): + fragment, other_actions = self._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + actions.append(fragment) + post_actions.extend(other_actions) + + if new_field.db_default is not NOT_PROVIDED: + if ( + old_field.db_default is NOT_PROVIDED + or new_field.db_default != old_field.db_default + ): + actions.append( + self._alter_column_database_default_sql(model, old_field, new_field) + ) + elif old_field.db_default is not NOT_PROVIDED: + actions.append( + self._alter_column_database_default_sql( + model, old_field, new_field, drop=True + ) + ) + # When changing a column NULL constraint to NOT NULL with a given + # default value, we need to perform 4 steps: + # 1. Add a default for new incoming writes + # 2. Update existing NULL rows with new default + # 3. 
Replace NULL constraint with NOT NULL + # 4. Drop the default again. + # Default change? + needs_database_default = False + if ( + old_field.null + and not new_field.null + and new_field.db_default is NOT_PROVIDED + ): + old_default = self.effective_default(old_field) + new_default = self.effective_default(new_field) + if ( + not self.skip_default_on_alter(new_field) + and old_default != new_default + and new_default is not None + ): + needs_database_default = True + actions.append( + self._alter_column_default_sql(model, old_field, new_field) + ) + # Nullability change? + if old_field.null != new_field.null: + fragment = self._alter_column_null_sql(model, old_field, new_field) + if fragment: + null_actions.append(fragment) + # Only if we have a default and there is a change from NULL to NOT NULL + four_way_default_alteration = ( + new_field.has_default() or new_field.db_default is not NOT_PROVIDED + ) and (old_field.null and not new_field.null) + if actions or null_actions: + if not four_way_default_alteration: + # If we don't have to do a 4-way default alteration we can + # directly run a (NOT) NULL alteration + actions += null_actions + # Combine actions together if we can (e.g. postgres) + if self.connection.features.supports_combined_alters and actions: + sql, params = tuple(zip(*actions)) + actions = [(", ".join(sql), sum(params, []))] + # Apply those actions + for sql, params in actions: + self.execute( + self.sql_alter_column + % { + "table": self.quote_name(model._meta.db_table), + "changes": sql, + }, + params, + ) + if four_way_default_alteration: + if new_field.db_default is NOT_PROVIDED: + default_sql = "%s" + params = [new_default] + else: + default_sql, params = self.db_default_sql(new_field) + # Update existing rows with default value + self.execute( + self.sql_update_with_default + % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(new_field.column), + "default": default_sql, + }, + params, + ) + # Since we didn't run a NOT NULL change before we need to do it + # now + for sql, params in null_actions: + self.execute( + self.sql_alter_column + % { + "table": self.quote_name(model._meta.db_table), + "changes": sql, + }, + params, + ) + if post_actions: + for sql, params in post_actions: + self.execute(sql, params) + # If primary_key changed to False, delete the primary key constraint. + if old_field.primary_key and not new_field.primary_key: + self._delete_primary_key(model, strict) + # Added a unique? + if self._unique_should_be_added(old_field, new_field): + self.execute(self._create_unique_sql(model, [new_field])) + # Added an index? Add an index if db_index switched to True or a unique + # constraint will no longer be used in lieu of an index. The following + # lines from the truth table show all True cases; the rest are False: + # + # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique + # ------------------------------------------------------------------------------ + # False | False | True | False + # False | True | True | False + # True | True | True | False + if ( + (not old_field.db_index or old_field.unique) + and new_field.db_index + and not new_field.unique + ): + self.execute(self._create_index_sql(model, fields=[new_field])) + # Type alteration on primary key? Then we need to alter the column + # referring to us. + rels_to_update = [] + if drop_foreign_keys: + rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) + # Changed to become primary key? 
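+        # (_field_became_primary_key() only matches a False -> True
+        # transition, so a column that was already the primary key never
+        # reaches this branch.)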
+ if self._field_became_primary_key(old_field, new_field): + # Make the new one + self.execute(self._create_primary_key_sql(model, new_field)) + # Update all referencing columns + rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) + # Handle our type alters on the other end of rels from the PK stuff above + for old_rel, new_rel in rels_to_update: + rel_db_params = new_rel.field.db_parameters(connection=self.connection) + rel_type = rel_db_params["type"] + rel_collation = rel_db_params.get("collation") + old_rel_db_params = old_rel.field.db_parameters(connection=self.connection) + old_rel_collation = old_rel_db_params.get("collation") + fragment, other_actions = self._alter_column_type_sql( + new_rel.related_model, + old_rel.field, + new_rel.field, + rel_type, + old_rel_collation, + rel_collation, + ) + self.execute( + self.sql_alter_column + % { + "table": self.quote_name(new_rel.related_model._meta.db_table), + "changes": fragment[0], + }, + fragment[1], + ) + for sql, params in other_actions: + self.execute(sql, params) + # Does it have a foreign key? + if ( + self.connection.features.supports_foreign_keys + and new_field.remote_field + and ( + fks_dropped or not old_field.remote_field or not old_field.db_constraint + ) + and new_field.db_constraint + ): + self.execute( + self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s") + ) + # Rebuild FKs that pointed to us if we previously had to drop them + if drop_foreign_keys: + for _, rel in rels_to_update: + if rel.field.db_constraint: + self.execute( + self._create_fk_sql(rel.related_model, rel.field, "_fk") + ) + # Does it have check constraints we need to add? + if old_db_check != new_db_check and new_db_check: + constraint_name = self._create_index_name( + model._meta.db_table, [new_field.column], suffix="_check" + ) + self.execute( + self._create_check_sql(model, constraint_name, new_db_params["check"]) + ) + # Drop the default if we need to + # (Django usually does not use in-database defaults) + if needs_database_default: + changes_sql, params = self._alter_column_default_sql( + model, old_field, new_field, drop=True + ) + sql = self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": changes_sql, + } + self.execute(sql, params) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + + def _alter_column_null_sql(self, model, old_field, new_field): + """ + Hook to specialize column null alteration. + + Return a (sql, params) fragment to set a column to null or non-null + as required by new_field, or None if no changes are required. + """ + if ( + self.connection.features.interprets_empty_strings_as_nulls + and new_field.empty_strings_allowed + ): + # The field is nullable in the database anyway, leave it alone. + return + else: + new_db_params = new_field.db_parameters(connection=self.connection) + sql = ( + self.sql_alter_column_null + if new_field.null + else self.sql_alter_column_not_null + ) + return ( + sql + % { + "column": self.quote_name(new_field.column), + "type": new_db_params["type"], + }, + [], + ) + + def _alter_column_default_sql(self, model, old_field, new_field, drop=False): + """ + Hook to specialize column default alteration. + + Return a (sql, params) fragment to add or drop (depending on the drop + argument) a default to new_field's column. 
+ """ + new_default = self.effective_default(new_field) + default = self._column_default_sql(new_field) + params = [new_default] + + if drop: + params = [] + elif self.connection.features.requires_literal_defaults: + # Some databases (Oracle) can't take defaults as a parameter + # If this is the case, the SchemaEditor for that database should + # implement prepare_default(). + default = self.prepare_default(new_default) + params = [] + + new_db_params = new_field.db_parameters(connection=self.connection) + if drop: + if new_field.null: + sql = self.sql_alter_column_no_default_null + else: + sql = self.sql_alter_column_no_default + else: + sql = self.sql_alter_column_default + return ( + sql + % { + "column": self.quote_name(new_field.column), + "type": new_db_params["type"], + "default": default, + }, + params, + ) + + def _alter_column_database_default_sql( + self, model, old_field, new_field, drop=False + ): + """ + Hook to specialize column database default alteration. + + Return a (sql, params) fragment to add or drop (depending on the drop + argument) a default to new_field's column. + """ + if drop: + sql = self.sql_alter_column_no_default + default_sql = "" + params = [] + else: + sql = self.sql_alter_column_default + default_sql, params = self.db_default_sql(new_field) + + new_db_params = new_field.db_parameters(connection=self.connection) + return ( + sql + % { + "column": self.quote_name(new_field.column), + "type": new_db_params["type"], + "default": default_sql, + }, + params, + ) + + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): + """ + Hook to specialize column type alteration for different backends, + for cases when a creation type is different to an alteration type + (e.g. SERIAL in PostgreSQL, PostGIS fields). + + Return a 2-tuple of: an SQL fragment of (sql, params) to insert into + an ALTER TABLE statement and a list of extra (sql, params) tuples to + run once the field is altered. + """ + other_actions = [] + if collate_sql := self._collate_sql( + new_collation, old_collation, model._meta.db_table + ): + collate_sql = f" {collate_sql}" + else: + collate_sql = "" + # Comment change? + comment_sql = "" + if self.connection.features.supports_comments and not new_field.many_to_many: + if old_field.db_comment != new_field.db_comment: + # PostgreSQL and Oracle can't execute 'ALTER COLUMN ...' and + # 'COMMENT ON ...' at the same time. 
+ sql, params = self._alter_column_comment_sql( + model, new_field, new_type, new_field.db_comment + ) + if sql: + other_actions.append((sql, params)) + if new_field.db_comment: + comment_sql = self._comment_sql(new_field.db_comment) + return ( + ( + self.sql_alter_column_type + % { + "column": self.quote_name(new_field.column), + "type": new_type, + "collation": collate_sql, + "comment": comment_sql, + }, + [], + ), + other_actions, + ) + + def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): + return ( + self.sql_alter_column_comment + % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(new_field.column), + "comment": self._comment_sql(new_db_comment), + }, + [], + ) + + def _comment_sql(self, comment): + return self.quote_value(comment or "") + + def _alter_many_to_many(self, model, old_field, new_field, strict): + """Alter M2Ms to repoint their to= endpoints.""" + # Rename the through table + if ( + old_field.remote_field.through._meta.db_table + != new_field.remote_field.through._meta.db_table + ): + self.alter_db_table( + old_field.remote_field.through, + old_field.remote_field.through._meta.db_table, + new_field.remote_field.through._meta.db_table, + ) + # Repoint the FK to the other side + self.alter_field( + new_field.remote_field.through, + # The field that points to the target model is needed, so we can + # tell alter_field to change it - this is m2m_reverse_field_name() + # (as opposed to m2m_field_name(), which points to our model). + old_field.remote_field.through._meta.get_field( + old_field.m2m_reverse_field_name() + ), + new_field.remote_field.through._meta.get_field( + new_field.m2m_reverse_field_name() + ), + ) + self.alter_field( + new_field.remote_field.through, + # for self-referential models we need to alter field from the other end too + old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()), + new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()), + ) + + def _create_index_name(self, table_name, column_names, suffix=""): + """ + Generate a unique name for an index/unique constraint. + + The name is divided into 3 parts: the table name, the column names, + and a unique digest and suffix. + """ + _, table_name = split_identifier(table_name) + hash_suffix_part = "%s%s" % ( + names_digest(table_name, *column_names, length=8), + suffix, + ) + max_length = self.connection.ops.max_name_length() or 200 + # If everything fits into max_length, use that name. + index_name = "%s_%s_%s" % (table_name, "_".join(column_names), hash_suffix_part) + if len(index_name) <= max_length: + return index_name + # Shorten a long suffix. + if len(hash_suffix_part) > max_length / 3: + hash_suffix_part = hash_suffix_part[: max_length // 3] + other_length = (max_length - len(hash_suffix_part)) // 2 - 1 + index_name = "%s_%s_%s" % ( + table_name[:other_length], + "_".join(column_names)[:other_length], + hash_suffix_part, + ) + # Prepend D if needed to prevent the name from starting with an + # underscore or a number (not permitted on Oracle). 
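+        # The last character is dropped so the name keeps its length,
+        # e.g. "_user_email_1a2b3c4d" becomes "D_user_email_1a2b3c4".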
+ if index_name[0] == "_" or index_name[0].isdigit(): + index_name = "D%s" % index_name[:-1] + return index_name + + def _get_index_tablespace_sql(self, model, fields, db_tablespace=None): + if db_tablespace is None: + if len(fields) == 1 and fields[0].db_tablespace: + db_tablespace = fields[0].db_tablespace + elif settings.DEFAULT_INDEX_TABLESPACE: + db_tablespace = settings.DEFAULT_INDEX_TABLESPACE + elif model._meta.db_tablespace: + db_tablespace = model._meta.db_tablespace + if db_tablespace is not None: + return " " + self.connection.ops.tablespace_sql(db_tablespace) + return "" + + def _index_condition_sql(self, condition): + if condition: + return " WHERE " + condition + return "" + + def _index_include_sql(self, model, columns): + if not columns or not self.connection.features.supports_covering_indexes: + return "" + return Statement( + " INCLUDE (%(columns)s)", + columns=Columns(model._meta.db_table, columns, self.quote_name), + ) + + def _create_index_sql( + self, + model, + *, + fields=None, + name=None, + suffix="", + using="", + db_tablespace=None, + col_suffixes=(), + sql=None, + opclasses=(), + condition=None, + include=None, + expressions=None, + ): + """ + Return the SQL statement to create the index for one or several fields + or expressions. `sql` can be specified if the syntax differs from the + standard (GIS indexes, ...). + """ + fields = fields or [] + expressions = expressions or [] + compiler = Query(model, alias_cols=False).get_compiler( + connection=self.connection, + ) + tablespace_sql = self._get_index_tablespace_sql( + model, fields, db_tablespace=db_tablespace + ) + columns = [field.column for field in fields] + sql_create_index = sql or self.sql_create_index + table = model._meta.db_table + + def create_index_name(*args, **kwargs): + nonlocal name + if name is None: + name = self._create_index_name(*args, **kwargs) + return self.quote_name(name) + + return Statement( + sql_create_index, + table=Table(table, self.quote_name), + name=IndexName(table, columns, suffix, create_index_name), + using=using, + columns=( + self._index_columns(table, columns, col_suffixes, opclasses) + if columns + else Expressions(table, expressions, compiler, self.quote_value) + ), + extra=tablespace_sql, + condition=self._index_condition_sql(condition), + include=self._index_include_sql(model, include), + ) + + def _delete_index_sql(self, model, name, sql=None): + return Statement( + sql or self.sql_delete_index, + table=Table(model._meta.db_table, self.quote_name), + name=self.quote_name(name), + ) + + def _rename_index_sql(self, model, old_name, new_name): + return Statement( + self.sql_rename_index, + table=Table(model._meta.db_table, self.quote_name), + old_name=self.quote_name(old_name), + new_name=self.quote_name(new_name), + ) + + def _index_columns(self, table, columns, col_suffixes, opclasses): + return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes) + + def _model_indexes_sql(self, model): + """ + Return a list of all index SQL statements (field indexes, + index_together, Meta.indexes) for the specified model. + """ + if not model._meta.managed or model._meta.proxy or model._meta.swapped: + return [] + output = [] + for field in model._meta.local_fields: + output.extend(self._field_indexes_sql(model, field)) + + # RemovedInDjango51Warning. 
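+        # Meta.index_together is deprecated; an entry such as
+        # index_together = [["foo", "bar"]] is expressible today as
+        # models.Index(fields=["foo", "bar"]) in Meta.indexes.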
+ for field_names in model._meta.index_together: + fields = [model._meta.get_field(field) for field in field_names] + output.append(self._create_index_sql(model, fields=fields, suffix="_idx")) + + for index in model._meta.indexes: + if ( + not index.contains_expressions + or self.connection.features.supports_expression_indexes + ): + output.append(index.create_sql(model, self)) + return output + + def _field_indexes_sql(self, model, field): + """ + Return a list of all index SQL statements for the specified field. + """ + output = [] + if self._field_should_be_indexed(model, field): + output.append(self._create_index_sql(model, fields=[field])) + return output + + def _field_should_be_altered(self, old_field, new_field, ignore=None): + ignore = ignore or set() + _, old_path, old_args, old_kwargs = old_field.deconstruct() + _, new_path, new_args, new_kwargs = new_field.deconstruct() + # Don't alter when: + # - changing only a field name + # - changing an attribute that doesn't affect the schema + # - changing an attribute in the provided set of ignored attributes + # - adding only a db_column and the column name is not changed + for attr in ignore.union(old_field.non_db_attrs): + old_kwargs.pop(attr, None) + for attr in ignore.union(new_field.non_db_attrs): + new_kwargs.pop(attr, None) + return self.quote_name(old_field.column) != self.quote_name( + new_field.column + ) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs) + + def _field_should_be_indexed(self, model, field): + return field.db_index and not field.unique + + def _field_became_primary_key(self, old_field, new_field): + return not old_field.primary_key and new_field.primary_key + + def _unique_should_be_added(self, old_field, new_field): + return ( + not new_field.primary_key + and new_field.unique + and (not old_field.unique or old_field.primary_key) + ) + + def _rename_field_sql(self, table, old_field, new_field, new_type): + return self.sql_rename_column % { + "table": self.quote_name(table), + "old_column": self.quote_name(old_field.column), + "new_column": self.quote_name(new_field.column), + "type": new_type, + } + + def _create_fk_sql(self, model, field, suffix): + table = Table(model._meta.db_table, self.quote_name) + name = self._fk_constraint_name(model, field, suffix) + column = Columns(model._meta.db_table, [field.column], self.quote_name) + to_table = Table(field.target_field.model._meta.db_table, self.quote_name) + to_column = Columns( + field.target_field.model._meta.db_table, + [field.target_field.column], + self.quote_name, + ) + deferrable = self.connection.ops.deferrable_sql() + return Statement( + self.sql_create_fk, + table=table, + name=name, + column=column, + to_table=to_table, + to_column=to_column, + deferrable=deferrable, + ) + + def _fk_constraint_name(self, model, field, suffix): + def create_fk_name(*args, **kwargs): + return self.quote_name(self._create_index_name(*args, **kwargs)) + + return ForeignKeyName( + model._meta.db_table, + [field.column], + split_identifier(field.target_field.model._meta.db_table)[1], + [field.target_field.column], + suffix, + create_fk_name, + ) + + def _delete_fk_sql(self, model, name): + return self._delete_constraint_sql(self.sql_delete_fk, model, name) + + def _deferrable_constraint_sql(self, deferrable): + if deferrable is None: + return "" + if deferrable == Deferrable.DEFERRED: + return " DEFERRABLE INITIALLY DEFERRED" + if deferrable == Deferrable.IMMEDIATE: + return " DEFERRABLE INITIALLY IMMEDIATE" + + def _unique_sql( + self, + model, + 
fields, + name, + condition=None, + deferrable=None, + include=None, + opclasses=None, + expressions=None, + ): + if ( + deferrable + and not self.connection.features.supports_deferrable_unique_constraints + ): + return None + if condition or include or opclasses or expressions: + # Databases support conditional, covering, and functional unique + # constraints via a unique index. + sql = self._create_unique_sql( + model, + fields, + name=name, + condition=condition, + include=include, + opclasses=opclasses, + expressions=expressions, + ) + if sql: + self.deferred_sql.append(sql) + return None + constraint = self.sql_unique_constraint % { + "columns": ", ".join([self.quote_name(field.column) for field in fields]), + "deferrable": self._deferrable_constraint_sql(deferrable), + } + return self.sql_constraint % { + "name": self.quote_name(name), + "constraint": constraint, + } + + def _create_unique_sql( + self, + model, + fields, + name=None, + condition=None, + deferrable=None, + include=None, + opclasses=None, + expressions=None, + ): + if ( + ( + deferrable + and not self.connection.features.supports_deferrable_unique_constraints + ) + or (condition and not self.connection.features.supports_partial_indexes) + or (include and not self.connection.features.supports_covering_indexes) + or ( + expressions and not self.connection.features.supports_expression_indexes + ) + ): + return None + + compiler = Query(model, alias_cols=False).get_compiler( + connection=self.connection + ) + table = model._meta.db_table + columns = [field.column for field in fields] + if name is None: + name = self._unique_constraint_name(table, columns, quote=True) + else: + name = self.quote_name(name) + if condition or include or opclasses or expressions: + sql = self.sql_create_unique_index + else: + sql = self.sql_create_unique + if columns: + columns = self._index_columns( + table, columns, col_suffixes=(), opclasses=opclasses + ) + else: + columns = Expressions(table, expressions, compiler, self.quote_value) + return Statement( + sql, + table=Table(table, self.quote_name), + name=name, + columns=columns, + condition=self._index_condition_sql(condition), + deferrable=self._deferrable_constraint_sql(deferrable), + include=self._index_include_sql(model, include), + ) + + def _unique_constraint_name(self, table, columns, quote=True): + if quote: + + def create_unique_name(*args, **kwargs): + return self.quote_name(self._create_index_name(*args, **kwargs)) + + else: + create_unique_name = self._create_index_name + + return IndexName(table, columns, "_uniq", create_unique_name) + + def _delete_unique_sql( + self, + model, + name, + condition=None, + deferrable=None, + include=None, + opclasses=None, + expressions=None, + ): + if ( + ( + deferrable + and not self.connection.features.supports_deferrable_unique_constraints + ) + or (condition and not self.connection.features.supports_partial_indexes) + or (include and not self.connection.features.supports_covering_indexes) + or ( + expressions and not self.connection.features.supports_expression_indexes + ) + ): + return None + if condition or include or opclasses or expressions: + sql = self.sql_delete_index + else: + sql = self.sql_delete_unique + return self._delete_constraint_sql(sql, model, name) + + def _check_sql(self, name, check): + return self.sql_constraint % { + "name": self.quote_name(name), + "constraint": self.sql_check_constraint % {"check": check}, + } + + def _create_check_sql(self, model, name, check): + if not 
self.connection.features.supports_table_check_constraints: + return None + return Statement( + self.sql_create_check, + table=Table(model._meta.db_table, self.quote_name), + name=self.quote_name(name), + check=check, + ) + + def _delete_check_sql(self, model, name): + if not self.connection.features.supports_table_check_constraints: + return None + return self._delete_constraint_sql(self.sql_delete_check, model, name) + + def _delete_constraint_sql(self, template, model, name): + return Statement( + template, + table=Table(model._meta.db_table, self.quote_name), + name=self.quote_name(name), + ) + + def _constraint_names( + self, + model, + column_names=None, + unique=None, + primary_key=None, + index=None, + foreign_key=None, + check=None, + type_=None, + exclude=None, + ): + """Return all constraint names matching the columns and conditions.""" + if column_names is not None: + column_names = [ + self.connection.introspection.identifier_converter( + truncate_name(name, self.connection.ops.max_name_length()) + ) + if self.connection.features.truncates_names + else self.connection.introspection.identifier_converter(name) + for name in column_names + ] + with self.connection.cursor() as cursor: + constraints = self.connection.introspection.get_constraints( + cursor, model._meta.db_table + ) + result = [] + for name, infodict in constraints.items(): + if column_names is None or column_names == infodict["columns"]: + if unique is not None and infodict["unique"] != unique: + continue + if primary_key is not None and infodict["primary_key"] != primary_key: + continue + if index is not None and infodict["index"] != index: + continue + if check is not None and infodict["check"] != check: + continue + if foreign_key is not None and not infodict["foreign_key"]: + continue + if type_ is not None and infodict["type"] != type_: + continue + if not exclude or name not in exclude: + result.append(name) + return result + + def _delete_primary_key(self, model, strict=False): + constraint_names = self._constraint_names(model, primary_key=True) + if strict and len(constraint_names) != 1: + raise ValueError( + "Found wrong number (%s) of PK constraints for %s" + % ( + len(constraint_names), + model._meta.db_table, + ) + ) + for constraint_name in constraint_names: + self.execute(self._delete_primary_key_sql(model, constraint_name)) + + def _create_primary_key_sql(self, model, field): + return Statement( + self.sql_create_pk, + table=Table(model._meta.db_table, self.quote_name), + name=self.quote_name( + self._create_index_name( + model._meta.db_table, [field.column], suffix="_pk" + ) + ), + columns=Columns(model._meta.db_table, [field.column], self.quote_name), + ) + + def _delete_primary_key_sql(self, model, name): + return self._delete_constraint_sql(self.sql_delete_pk, model, name) + + def _collate_sql(self, collation, old_collation=None, table_name=None): + return "COLLATE " + self.quote_name(collation) if collation else "" + + def remove_procedure(self, procedure_name, param_types=()): + sql = self.sql_delete_procedure % { + "procedure": self.quote_name(procedure_name), + "param_types": ",".join(param_types), + } + self.execute(sql) diff --git a/testbed/django__django/django/db/backends/base/validation.py b/testbed/django__django/django/db/backends/base/validation.py new file mode 100644 index 0000000000000000000000000000000000000000..d0e3e2157d694c5d5cb23b7bcfcd39873862a139 --- /dev/null +++ b/testbed/django__django/django/db/backends/base/validation.py @@ -0,0 +1,29 @@ +class BaseDatabaseValidation: + 
"""Encapsulate backend-specific validation.""" + + def __init__(self, connection): + self.connection = connection + + def check(self, **kwargs): + return [] + + def check_field(self, field, **kwargs): + errors = [] + # Backends may implement a check_field_type() method. + if ( + hasattr(self, "check_field_type") + and + # Ignore any related fields. + not getattr(field, "remote_field", None) + ): + # Ignore fields with unsupported features. + db_supports_all_required_features = all( + getattr(self.connection.features, feature, False) + for feature in field.model._meta.required_db_features + ) + if db_supports_all_required_features: + field_type = field.db_type(self.connection) + # Ignore non-concrete fields. + if field_type is not None: + errors.extend(self.check_field_type(field, field_type)) + return errors diff --git a/testbed/django__django/django/db/backends/dummy/__init__.py b/testbed/django__django/django/db/backends/dummy/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/db/backends/dummy/base.py b/testbed/django__django/django/db/backends/dummy/base.py new file mode 100644 index 0000000000000000000000000000000000000000..36c6480a7858286679dbb8cbd6b0435bbd096149 --- /dev/null +++ b/testbed/django__django/django/db/backends/dummy/base.py @@ -0,0 +1,74 @@ +""" +Dummy database backend for Django. + +Django uses this if the database ENGINE setting is empty (None or empty string). + +Each of these API functions, except connection.close(), raise +ImproperlyConfigured. +""" + +from django.core.exceptions import ImproperlyConfigured +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.base.client import BaseDatabaseClient +from django.db.backends.base.creation import BaseDatabaseCreation +from django.db.backends.base.introspection import BaseDatabaseIntrospection +from django.db.backends.base.operations import BaseDatabaseOperations +from django.db.backends.dummy.features import DummyDatabaseFeatures + + +def complain(*args, **kwargs): + raise ImproperlyConfigured( + "settings.DATABASES is improperly configured. " + "Please supply the ENGINE value. Check " + "settings documentation for more details." + ) + + +def ignore(*args, **kwargs): + pass + + +class DatabaseOperations(BaseDatabaseOperations): + quote_name = complain + + +class DatabaseClient(BaseDatabaseClient): + runshell = complain + + +class DatabaseCreation(BaseDatabaseCreation): + create_test_db = ignore + destroy_test_db = ignore + + +class DatabaseIntrospection(BaseDatabaseIntrospection): + get_table_list = complain + get_table_description = complain + get_relations = complain + get_indexes = complain + + +class DatabaseWrapper(BaseDatabaseWrapper): + operators = {} + # Override the base class implementations with null + # implementations. Anything that tries to actually + # do something raises complain; anything that tries + # to rollback or undo something raises ignore. + _cursor = complain + ensure_connection = complain + _commit = complain + _rollback = ignore + _close = ignore + _savepoint = ignore + _savepoint_commit = complain + _savepoint_rollback = ignore + _set_autocommit = complain + # Classes instantiated in __init__(). 
+ client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DummyDatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + + def is_usable(self): + return True diff --git a/testbed/django__django/django/db/backends/dummy/features.py b/testbed/django__django/django/db/backends/dummy/features.py new file mode 100644 index 0000000000000000000000000000000000000000..ac91731969c9d1b89975214bf308e8ebbc8787e5 --- /dev/null +++ b/testbed/django__django/django/db/backends/dummy/features.py @@ -0,0 +1,6 @@ +from django.db.backends.base.features import BaseDatabaseFeatures + + +class DummyDatabaseFeatures(BaseDatabaseFeatures): + supports_transactions = False + uses_savepoints = False diff --git a/testbed/django__django/django/db/backends/mysql/__init__.py b/testbed/django__django/django/db/backends/mysql/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/db/backends/mysql/base.py b/testbed/django__django/django/db/backends/mysql/base.py new file mode 100644 index 0000000000000000000000000000000000000000..6811b10abb234b263ef6460af4d4c8bf83b9711a --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/base.py @@ -0,0 +1,444 @@ +""" +MySQL database backend for Django. + +Requires mysqlclient: https://pypi.org/project/mysqlclient/ +""" +from django.core.exceptions import ImproperlyConfigured +from django.db import IntegrityError +from django.db.backends import utils as backend_utils +from django.db.backends.base.base import BaseDatabaseWrapper +from django.utils.asyncio import async_unsafe +from django.utils.functional import cached_property +from django.utils.regex_helper import _lazy_re_compile + +try: + import MySQLdb as Database +except ImportError as err: + raise ImproperlyConfigured( + "Error loading MySQLdb module.\nDid you install mysqlclient?" + ) from err + +from MySQLdb.constants import CLIENT, FIELD_TYPE +from MySQLdb.converters import conversions + +# Some of these import MySQLdb, so import them after checking if it's installed. +from .client import DatabaseClient +from .creation import DatabaseCreation +from .features import DatabaseFeatures +from .introspection import DatabaseIntrospection +from .operations import DatabaseOperations +from .schema import DatabaseSchemaEditor +from .validation import DatabaseValidation + +version = Database.version_info +if version < (1, 4, 3): + raise ImproperlyConfigured( + "mysqlclient 1.4.3 or newer is required; you have %s." % Database.__version__ + ) + + +# MySQLdb returns TIME columns as timedelta -- they are more like timedelta in +# terms of actual behavior as they are signed and include days -- and Django +# expects time. +django_conversions = { + **conversions, + **{FIELD_TYPE.TIME: backend_utils.typecast_time}, +} + +# This should match the numerical portion of the version numbers (we can treat +# versions like 5.0.24 and 5.0.24a as the same). +server_version_re = _lazy_re_compile(r"(\d{1,2})\.(\d{1,2})\.(\d{1,2})") + + +class CursorWrapper: + """ + A thin wrapper around MySQLdb's normal cursor class that catches particular + exception instances and reraises them with the correct types. + + Implemented as a wrapper, rather than a subclass, so that it isn't stuck + to the particular underlying representation returned by Connection.cursor(). 
+ """ + + codes_for_integrityerror = ( + 1048, # Column cannot be null + 1690, # BIGINT UNSIGNED value is out of range + 3819, # CHECK constraint is violated + 4025, # CHECK constraint failed + ) + + def __init__(self, cursor): + self.cursor = cursor + + def execute(self, query, args=None): + try: + # args is None means no string interpolation + return self.cursor.execute(query, args) + except Database.OperationalError as e: + # Map some error codes to IntegrityError, since they seem to be + # misclassified and Django would prefer the more logical place. + if e.args[0] in self.codes_for_integrityerror: + raise IntegrityError(*tuple(e.args)) + raise + + def executemany(self, query, args): + try: + return self.cursor.executemany(query, args) + except Database.OperationalError as e: + # Map some error codes to IntegrityError, since they seem to be + # misclassified and Django would prefer the more logical place. + if e.args[0] in self.codes_for_integrityerror: + raise IntegrityError(*tuple(e.args)) + raise + + def __getattr__(self, attr): + return getattr(self.cursor, attr) + + def __iter__(self): + return iter(self.cursor) + + +class DatabaseWrapper(BaseDatabaseWrapper): + vendor = "mysql" + # This dictionary maps Field objects to their associated MySQL column + # types, as strings. Column-type strings can contain format strings; they'll + # be interpolated against the values of Field.__dict__ before being output. + # If a column type is set to None, it won't be included in the output. + data_types = { + "AutoField": "integer AUTO_INCREMENT", + "BigAutoField": "bigint AUTO_INCREMENT", + "BinaryField": "longblob", + "BooleanField": "bool", + "CharField": "varchar(%(max_length)s)", + "DateField": "date", + "DateTimeField": "datetime(6)", + "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)", + "DurationField": "bigint", + "FileField": "varchar(%(max_length)s)", + "FilePathField": "varchar(%(max_length)s)", + "FloatField": "double precision", + "IntegerField": "integer", + "BigIntegerField": "bigint", + "IPAddressField": "char(15)", + "GenericIPAddressField": "char(39)", + "JSONField": "json", + "OneToOneField": "integer", + "PositiveBigIntegerField": "bigint UNSIGNED", + "PositiveIntegerField": "integer UNSIGNED", + "PositiveSmallIntegerField": "smallint UNSIGNED", + "SlugField": "varchar(%(max_length)s)", + "SmallAutoField": "smallint AUTO_INCREMENT", + "SmallIntegerField": "smallint", + "TextField": "longtext", + "TimeField": "time(6)", + "UUIDField": "char(32)", + } + + # For these data types: + # - MySQL < 8.0.13 doesn't accept default values and implicitly treats them + # as nullable + # - all versions of MySQL and MariaDB don't support full width database + # indexes + _limited_data_types = ( + "tinyblob", + "blob", + "mediumblob", + "longblob", + "tinytext", + "text", + "mediumtext", + "longtext", + "json", + ) + + operators = { + "exact": "= %s", + "iexact": "LIKE %s", + "contains": "LIKE BINARY %s", + "icontains": "LIKE %s", + "gt": "> %s", + "gte": ">= %s", + "lt": "< %s", + "lte": "<= %s", + "startswith": "LIKE BINARY %s", + "endswith": "LIKE BINARY %s", + "istartswith": "LIKE %s", + "iendswith": "LIKE %s", + } + + # The patterns below are used to generate SQL pattern lookup clauses when + # the right-hand side of the lookup isn't a raw string (it might be an expression + # or the result of a bilateral transformation). + # In those cases, special characters for LIKE operators (e.g. \, *, _) should be + # escaped on database side. 
+ # + # Note: we use str.format() here for readability as '%' is used as a wildcard for + # the LIKE operator. + pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')" + pattern_ops = { + "contains": "LIKE BINARY CONCAT('%%', {}, '%%')", + "icontains": "LIKE CONCAT('%%', {}, '%%')", + "startswith": "LIKE BINARY CONCAT({}, '%%')", + "istartswith": "LIKE CONCAT({}, '%%')", + "endswith": "LIKE BINARY CONCAT('%%', {})", + "iendswith": "LIKE CONCAT('%%', {})", + } + + isolation_levels = { + "read uncommitted", + "read committed", + "repeatable read", + "serializable", + } + + Database = Database + SchemaEditorClass = DatabaseSchemaEditor + # Classes instantiated in __init__(). + client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + validation_class = DatabaseValidation + + def get_database_version(self): + return self.mysql_version + + def get_connection_params(self): + kwargs = { + "conv": django_conversions, + "charset": "utf8", + } + settings_dict = self.settings_dict + if settings_dict["USER"]: + kwargs["user"] = settings_dict["USER"] + if settings_dict["NAME"]: + kwargs["database"] = settings_dict["NAME"] + if settings_dict["PASSWORD"]: + kwargs["password"] = settings_dict["PASSWORD"] + if settings_dict["HOST"].startswith("/"): + kwargs["unix_socket"] = settings_dict["HOST"] + elif settings_dict["HOST"]: + kwargs["host"] = settings_dict["HOST"] + if settings_dict["PORT"]: + kwargs["port"] = int(settings_dict["PORT"]) + # We need the number of potentially affected rows after an + # "UPDATE", not the number of changed rows. + kwargs["client_flag"] = CLIENT.FOUND_ROWS + # Validate the transaction isolation level, if specified. + options = settings_dict["OPTIONS"].copy() + isolation_level = options.pop("isolation_level", "read committed") + if isolation_level: + isolation_level = isolation_level.lower() + if isolation_level not in self.isolation_levels: + raise ImproperlyConfigured( + "Invalid transaction isolation level '%s' specified.\n" + "Use one of %s, or None." + % ( + isolation_level, + ", ".join("'%s'" % s for s in sorted(self.isolation_levels)), + ) + ) + self.isolation_level = isolation_level + kwargs.update(options) + return kwargs + + @async_unsafe + def get_new_connection(self, conn_params): + connection = Database.connect(**conn_params) + # bytes encoder in mysqlclient doesn't work and was added only to + # prevent KeyErrors in Django < 2.0. We can remove this workaround when + # mysqlclient 2.1 becomes the minimal mysqlclient supported by Django. + # See https://github.com/PyMySQL/mysqlclient/issues/489 + if connection.encoders.get(bytes) is bytes: + connection.encoders.pop(bytes) + return connection + + def init_connection_state(self): + super().init_connection_state() + assignments = [] + if self.features.is_sql_auto_is_null_enabled: + # SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on + # a recently inserted row will return when the field is tested + # for NULL. Disabling this brings this aspect of MySQL in line + # with SQL standards. 
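+ # The quirk being disabled, sketched on a hypothetical table t with an
+ # AUTO_INCREMENT id column: with SQL_AUTO_IS_NULL = 1,
+ #   INSERT INTO t () VALUES (); SELECT * FROM t WHERE id IS NULL;
+ # would return the row just inserted rather than rows whose id is NULL.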
+ assignments.append("SET SQL_AUTO_IS_NULL = 0") + + if self.isolation_level: + assignments.append( + "SET SESSION TRANSACTION ISOLATION LEVEL %s" + % self.isolation_level.upper() + ) + + if assignments: + with self.cursor() as cursor: + cursor.execute("; ".join(assignments)) + + @async_unsafe + def create_cursor(self, name=None): + cursor = self.connection.cursor() + return CursorWrapper(cursor) + + def _rollback(self): + try: + BaseDatabaseWrapper._rollback(self) + except Database.NotSupportedError: + pass + + def _set_autocommit(self, autocommit): + with self.wrap_database_errors: + self.connection.autocommit(autocommit) + + def disable_constraint_checking(self): + """ + Disable foreign key checks, primarily for use in adding rows with + forward references. Always return True to indicate constraint checks + need to be re-enabled. + """ + with self.cursor() as cursor: + cursor.execute("SET foreign_key_checks=0") + return True + + def enable_constraint_checking(self): + """ + Re-enable foreign key checks after they have been disabled. + """ + # Override needs_rollback in case constraint_checks_disabled is + # nested inside transaction.atomic. + self.needs_rollback, needs_rollback = False, self.needs_rollback + try: + with self.cursor() as cursor: + cursor.execute("SET foreign_key_checks=1") + finally: + self.needs_rollback = needs_rollback + + def check_constraints(self, table_names=None): + """ + Check each table name in `table_names` for rows with invalid foreign + key references. This method is intended to be used in conjunction with + `disable_constraint_checking()` and `enable_constraint_checking()`, to + determine if rows with invalid references were entered while constraint + checks were off. + """ + with self.cursor() as cursor: + if table_names is None: + table_names = self.introspection.table_names(cursor) + for table_name in table_names: + primary_key_column_name = self.introspection.get_primary_key_column( + cursor, table_name + ) + if not primary_key_column_name: + continue + relations = self.introspection.get_relations(cursor, table_name) + for column_name, ( + referenced_column_name, + referenced_table_name, + ) in relations.items(): + cursor.execute( + """ + SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING + LEFT JOIN `%s` as REFERRED + ON (REFERRING.`%s` = REFERRED.`%s`) + WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL + """ + % ( + primary_key_column_name, + column_name, + table_name, + referenced_table_name, + column_name, + referenced_column_name, + column_name, + referenced_column_name, + ) + ) + for bad_row in cursor.fetchall(): + raise IntegrityError( + "The row in table '%s' with primary key '%s' has an " + "invalid foreign key: %s.%s contains a value '%s' that " + "does not have a corresponding value in %s.%s." 
+ % ( + table_name, + bad_row[0], + table_name, + column_name, + bad_row[1], + referenced_table_name, + referenced_column_name, + ) + ) + + def is_usable(self): + try: + self.connection.ping() + except Database.Error: + return False + else: + return True + + @cached_property + def display_name(self): + return "MariaDB" if self.mysql_is_mariadb else "MySQL" + + @cached_property + def data_type_check_constraints(self): + if self.features.supports_column_check_constraints: + check_constraints = { + "PositiveBigIntegerField": "`%(column)s` >= 0", + "PositiveIntegerField": "`%(column)s` >= 0", + "PositiveSmallIntegerField": "`%(column)s` >= 0", + } + if self.mysql_is_mariadb and self.mysql_version < (10, 4, 3): + # MariaDB < 10.4.3 doesn't automatically use the JSON_VALID as + # a check constraint. + check_constraints["JSONField"] = "JSON_VALID(`%(column)s`)" + return check_constraints + return {} + + @cached_property + def mysql_server_data(self): + with self.temporary_connection() as cursor: + # Select some server variables and test if the time zone + # definitions are installed. CONVERT_TZ returns NULL if 'UTC' + # timezone isn't loaded into the mysql.time_zone table. + cursor.execute( + """ + SELECT VERSION(), + @@sql_mode, + @@default_storage_engine, + @@sql_auto_is_null, + @@lower_case_table_names, + CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC') IS NOT NULL + """ + ) + row = cursor.fetchone() + return { + "version": row[0], + "sql_mode": row[1], + "default_storage_engine": row[2], + "sql_auto_is_null": bool(row[3]), + "lower_case_table_names": bool(row[4]), + "has_zoneinfo_database": bool(row[5]), + } + + @cached_property + def mysql_server_info(self): + return self.mysql_server_data["version"] + + @cached_property + def mysql_version(self): + match = server_version_re.match(self.mysql_server_info) + if not match: + raise Exception( + "Unable to determine MySQL version from version string %r" + % self.mysql_server_info + ) + return tuple(int(x) for x in match.groups()) + + @cached_property + def mysql_is_mariadb(self): + return "mariadb" in self.mysql_server_info.lower() + + @cached_property + def sql_mode(self): + sql_mode = self.mysql_server_data["sql_mode"] + return set(sql_mode.split(",") if sql_mode else ()) diff --git a/testbed/django__django/django/db/backends/mysql/client.py b/testbed/django__django/django/db/backends/mysql/client.py new file mode 100644 index 0000000000000000000000000000000000000000..6aa11b2e1f82cd91656bd849ca07107ccbef55a3 --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/client.py @@ -0,0 +1,72 @@ +import signal + +from django.db.backends.base.client import BaseDatabaseClient + + +class DatabaseClient(BaseDatabaseClient): + executable_name = "mysql" + + @classmethod + def settings_to_cmd_args_env(cls, settings_dict, parameters): + args = [cls.executable_name] + env = None + database = settings_dict["OPTIONS"].get( + "database", + settings_dict["OPTIONS"].get("db", settings_dict["NAME"]), + ) + user = settings_dict["OPTIONS"].get("user", settings_dict["USER"]) + password = settings_dict["OPTIONS"].get( + "password", + settings_dict["OPTIONS"].get("passwd", settings_dict["PASSWORD"]), + ) + host = settings_dict["OPTIONS"].get("host", settings_dict["HOST"]) + port = settings_dict["OPTIONS"].get("port", settings_dict["PORT"]) + server_ca = settings_dict["OPTIONS"].get("ssl", {}).get("ca") + client_cert = settings_dict["OPTIONS"].get("ssl", {}).get("cert") + client_key = settings_dict["OPTIONS"].get("ssl", {}).get("key") + defaults_file = 
settings_dict["OPTIONS"].get("read_default_file") + charset = settings_dict["OPTIONS"].get("charset") + # Seems to be no good way to set sql_mode with CLI. + + if defaults_file: + args += ["--defaults-file=%s" % defaults_file] + if user: + args += ["--user=%s" % user] + if password: + # The MYSQL_PWD environment variable usage is discouraged per + # MySQL's documentation due to the possibility of exposure through + # `ps` on old Unix flavors but --password suffers from the same + # flaw on even more systems. Usage of an environment variable also + # prevents password exposure if the subprocess.run(check=True) call + # raises a CalledProcessError since the string representation of + # the latter includes all of the provided `args`. + env = {"MYSQL_PWD": password} + if host: + if "/" in host: + args += ["--socket=%s" % host] + else: + args += ["--host=%s" % host] + if port: + args += ["--port=%s" % port] + if server_ca: + args += ["--ssl-ca=%s" % server_ca] + if client_cert: + args += ["--ssl-cert=%s" % client_cert] + if client_key: + args += ["--ssl-key=%s" % client_key] + if charset: + args += ["--default-character-set=%s" % charset] + if database: + args += [database] + args.extend(parameters) + return args, env + + def runshell(self, parameters): + sigint_handler = signal.getsignal(signal.SIGINT) + try: + # Allow SIGINT to pass to mysql to abort queries. + signal.signal(signal.SIGINT, signal.SIG_IGN) + super().runshell(parameters) + finally: + # Restore the original SIGINT handler. + signal.signal(signal.SIGINT, sigint_handler) diff --git a/testbed/django__django/django/db/backends/mysql/compiler.py b/testbed/django__django/django/db/backends/mysql/compiler.py new file mode 100644 index 0000000000000000000000000000000000000000..2ec6bea2f116571d5548134f98c0fbd76608035f --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/compiler.py @@ -0,0 +1,84 @@ +from django.core.exceptions import FieldError, FullResultSet +from django.db.models.expressions import Col +from django.db.models.sql import compiler + + +class SQLCompiler(compiler.SQLCompiler): + def as_subquery_condition(self, alias, columns, compiler): + qn = compiler.quote_name_unless_alias + qn2 = self.connection.ops.quote_name + sql, params = self.as_sql() + return ( + "(%s) IN (%s)" + % ( + ", ".join("%s.%s" % (qn(alias), qn2(column)) for column in columns), + sql, + ), + params, + ) + + +class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler): + pass + + +class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler): + def as_sql(self): + # Prefer the non-standard DELETE FROM syntax over the SQL generated by + # the SQLDeleteCompiler's default implementation when multiple tables + # are involved since MySQL/MariaDB will generate a more efficient query + # plan than when using a subquery. + where, having, qualify = self.query.where.split_having_qualify( + must_group_by=self.query.group_by is not None + ) + if self.single_alias or having or qualify: + # DELETE FROM cannot be used when filtering against aggregates or + # window functions as it doesn't allow for GROUP BY/HAVING clauses + # and the subquery wrapping (necessary to emulate QUALIFY). 
+ return super().as_sql() + result = [ + "DELETE %s FROM" + % self.quote_name_unless_alias(self.query.get_initial_alias()) + ] + from_sql, params = self.get_from_clause() + result.extend(from_sql) + try: + where_sql, where_params = self.compile(where) + except FullResultSet: + pass + else: + result.append("WHERE %s" % where_sql) + params.extend(where_params) + return " ".join(result), tuple(params) + + +class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler): + def as_sql(self): + update_query, update_params = super().as_sql() + # MySQL and MariaDB support UPDATE ... ORDER BY syntax. + if self.query.order_by: + order_by_sql = [] + order_by_params = [] + db_table = self.query.get_meta().db_table + try: + for resolved, (sql, params, _) in self.get_order_by(): + if ( + isinstance(resolved.expression, Col) + and resolved.expression.alias != db_table + ): + # Ignore ordering if it contains joined fields, because + # they cannot be used in the ORDER BY clause. + raise FieldError + order_by_sql.append(sql) + order_by_params.extend(params) + update_query += " ORDER BY " + ", ".join(order_by_sql) + update_params += tuple(order_by_params) + except FieldError: + # Ignore ordering if it contains annotations, because they're + # removed in .update() and cannot be resolved. + pass + return update_query, update_params + + +class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler): + pass diff --git a/testbed/django__django/django/db/backends/mysql/creation.py b/testbed/django__django/django/db/backends/mysql/creation.py new file mode 100644 index 0000000000000000000000000000000000000000..a060f41d180f38951dd900e86380659b15c34d5c --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/creation.py @@ -0,0 +1,87 @@ +import os +import subprocess +import sys + +from django.db.backends.base.creation import BaseDatabaseCreation + +from .client import DatabaseClient + + +class DatabaseCreation(BaseDatabaseCreation): + def sql_table_creation_suffix(self): + suffix = [] + test_settings = self.connection.settings_dict["TEST"] + if test_settings["CHARSET"]: + suffix.append("CHARACTER SET %s" % test_settings["CHARSET"]) + if test_settings["COLLATION"]: + suffix.append("COLLATE %s" % test_settings["COLLATION"]) + return " ".join(suffix) + + def _execute_create_test_db(self, cursor, parameters, keepdb=False): + try: + super()._execute_create_test_db(cursor, parameters, keepdb) + except Exception as e: + if len(e.args) < 1 or e.args[0] != 1007: + # All errors except "database exists" (1007) cancel tests. + self.log("Got an error creating the test database: %s" % e) + sys.exit(2) + else: + raise + + def _clone_test_db(self, suffix, verbosity, keepdb=False): + source_database_name = self.connection.settings_dict["NAME"] + target_database_name = self.get_test_db_clone_settings(suffix)["NAME"] + test_db_params = { + "dbname": self.connection.ops.quote_name(target_database_name), + "suffix": self.sql_table_creation_suffix(), + } + with self._nodb_cursor() as cursor: + try: + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception: + if keepdb: + # If the database should be kept, skip everything else. + return + try: + if verbosity >= 1: + self.log( + "Destroying old test database for alias %s..." 
+ % ( + self._get_database_display_str( + verbosity, target_database_name + ), + ) + ) + cursor.execute("DROP DATABASE %(dbname)s" % test_db_params) + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception as e: + self.log("Got an error recreating the test database: %s" % e) + sys.exit(2) + self._clone_db(source_database_name, target_database_name) + + def _clone_db(self, source_database_name, target_database_name): + cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env( + self.connection.settings_dict, [] + ) + dump_cmd = [ + "mysqldump", + *cmd_args[1:-1], + "--routines", + "--events", + source_database_name, + ] + dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None + load_cmd = cmd_args + load_cmd[-1] = target_database_name + + with subprocess.Popen( + dump_cmd, stdout=subprocess.PIPE, env=dump_env + ) as dump_proc: + with subprocess.Popen( + load_cmd, + stdin=dump_proc.stdout, + stdout=subprocess.DEVNULL, + env=load_env, + ): + # Allow dump_proc to receive a SIGPIPE if the load process exits. + dump_proc.stdout.close() diff --git a/testbed/django__django/django/db/backends/mysql/features.py b/testbed/django__django/django/db/backends/mysql/features.py new file mode 100644 index 0000000000000000000000000000000000000000..0bb0f91f5527ff23116a4b2f8ef3bf8d63e6b751 --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/features.py @@ -0,0 +1,351 @@ +import operator + +from django.db.backends.base.features import BaseDatabaseFeatures +from django.utils.functional import cached_property + + +class DatabaseFeatures(BaseDatabaseFeatures): + empty_fetchmany_value = () + allows_group_by_selected_pks = True + related_fields_match_type = True + # MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME. + allow_sliced_subqueries_with_in = False + has_select_for_update = True + supports_forward_references = False + supports_regex_backreferencing = False + supports_date_lookup_using_string = False + supports_timezones = False + requires_explicit_null_ordering_when_grouping = True + atomic_transactions = False + can_clone_databases = True + supports_comments = True + supports_comments_inline = True + supports_temporal_subtraction = True + supports_slicing_ordering_in_compound = True + supports_index_on_text_field = False + supports_update_conflicts = True + delete_can_self_reference_subquery = False + create_test_procedure_without_params_sql = """ + CREATE PROCEDURE test_procedure () + BEGIN + DECLARE V_I INTEGER; + SET V_I = 1; + END; + """ + create_test_procedure_with_int_param_sql = """ + CREATE PROCEDURE test_procedure (P_I INTEGER) + BEGIN + DECLARE V_I INTEGER; + SET V_I = P_I; + END; + """ + create_test_table_with_composite_primary_key = """ + CREATE TABLE test_table_composite_pk ( + column_1 INTEGER NOT NULL, + column_2 INTEGER NOT NULL, + PRIMARY KEY(column_1, column_2) + ) + """ + # Neither MySQL nor MariaDB support partial indexes. + supports_partial_indexes = False + # COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an + # indexed expression. 
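+ # Illustrative DDL (hypothetical table): the index must be written as
+ #   CREATE INDEX idx ON tbl ((`name` COLLATE utf8mb4_bin))
+ # since a bare `name` COLLATE ... is not accepted as a plain key part.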
+ collate_as_index_expression = True + insert_test_table_with_defaults = "INSERT INTO {} () VALUES ()" + + supports_order_by_nulls_modifier = False + order_by_nulls_first = True + supports_logical_xor = True + + @cached_property + def minimum_database_version(self): + if self.connection.mysql_is_mariadb: + return (10, 4) + else: + return (8,) + + @cached_property + def test_collations(self): + charset = "utf8" + if ( + self.connection.mysql_is_mariadb + and self.connection.mysql_version >= (10, 6) + ) or ( + not self.connection.mysql_is_mariadb + and self.connection.mysql_version >= (8, 0, 30) + ): + # utf8 is an alias for utf8mb3 in MariaDB 10.6+ and MySQL 8.0.30+. + charset = "utf8mb3" + return { + "ci": f"{charset}_general_ci", + "non_default": f"{charset}_esperanto_ci", + "swedish_ci": f"{charset}_swedish_ci", + } + + test_now_utc_template = "UTC_TIMESTAMP(6)" + + @cached_property + def django_test_skips(self): + skips = { + "This doesn't work on MySQL.": { + "db_functions.comparison.test_greatest.GreatestTests." + "test_coalesce_workaround", + "db_functions.comparison.test_least.LeastTests." + "test_coalesce_workaround", + }, + "Running on MySQL requires utf8mb4 encoding (#18392).": { + "model_fields.test_textfield.TextFieldTests.test_emoji", + "model_fields.test_charfield.TestCharField.test_emoji", + }, + "MySQL doesn't support functional indexes on a function that " + "returns JSON": { + "schema.tests.SchemaTests.test_func_index_json_key_transform", + }, + "MySQL supports multiplying and dividing DurationFields by a " + "scalar value but it's not implemented (#25287).": { + "expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide", + }, + "UPDATE ... ORDER BY syntax on MySQL/MariaDB does not support ordering by" + " related fields.": { + "update.tests.AdvancedTests." + "test_update_ordered_by_inline_m2m_annotation", + "update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation", + "update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation_desc", + }, + } + if self.connection.mysql_is_mariadb and ( + 10, + 4, + 3, + ) < self.connection.mysql_version < (10, 5, 2): + skips.update( + { + "https://jira.mariadb.org/browse/MDEV-19598": { + "schema.tests.SchemaTests." + "test_alter_not_unique_field_to_primary_key", + }, + } + ) + if self.connection.mysql_is_mariadb and ( + 10, + 4, + 12, + ) < self.connection.mysql_version < (10, 5): + skips.update( + { + "https://jira.mariadb.org/browse/MDEV-22775": { + "schema.tests.SchemaTests." + "test_alter_pk_with_self_referential_field", + }, + } + ) + if not self.supports_explain_analyze: + skips.update( + { + "MariaDB and MySQL >= 8.0.18 specific.": { + "queries.test_explain.ExplainTests.test_mysql_analyze", + }, + } + ) + if "ONLY_FULL_GROUP_BY" in self.connection.sql_mode: + skips.update( + { + "GROUP BY cannot contain nonaggregated column when " + "ONLY_FULL_GROUP_BY mode is enabled on MySQL, see #34262.": { + "aggregation.tests.AggregateTestCase." + "test_group_by_nested_expression_with_params", + }, + } + ) + if self.connection.mysql_version < (8, 0, 31): + skips.update( + { + "Nesting of UNIONs at the right-hand side is not supported on " + "MySQL < 8.0.31": { + "queries.test_qs_combinators.QuerySetSetOperationTests." + "test_union_nested" + }, + } + ) + return skips + + @cached_property + def _mysql_storage_engine(self): + "Internal method used in Django tests. 
Don't rely on this from your code" + return self.connection.mysql_server_data["default_storage_engine"] + + @cached_property + def allows_auto_pk_0(self): + """ + Autoincrement primary key can be set to 0 if it doesn't generate new + autoincrement values. + """ + return "NO_AUTO_VALUE_ON_ZERO" in self.connection.sql_mode + + @cached_property + def update_can_self_select(self): + return self.connection.mysql_is_mariadb and self.connection.mysql_version >= ( + 10, + 3, + 2, + ) + + @cached_property + def can_introspect_foreign_keys(self): + "Confirm support for introspected foreign keys" + return self._mysql_storage_engine != "MyISAM" + + @cached_property + def introspected_field_types(self): + return { + **super().introspected_field_types, + "BinaryField": "TextField", + "BooleanField": "IntegerField", + "DurationField": "BigIntegerField", + "GenericIPAddressField": "CharField", + } + + @cached_property + def can_return_columns_from_insert(self): + return self.connection.mysql_is_mariadb and self.connection.mysql_version >= ( + 10, + 5, + 0, + ) + + can_return_rows_from_bulk_insert = property( + operator.attrgetter("can_return_columns_from_insert") + ) + + @cached_property + def has_zoneinfo_database(self): + return self.connection.mysql_server_data["has_zoneinfo_database"] + + @cached_property + def is_sql_auto_is_null_enabled(self): + return self.connection.mysql_server_data["sql_auto_is_null"] + + @cached_property + def supports_over_clause(self): + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 2) + + supports_frame_range_fixed_distance = property( + operator.attrgetter("supports_over_clause") + ) + + @cached_property + def supports_column_check_constraints(self): + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 16) + + supports_table_check_constraints = property( + operator.attrgetter("supports_column_check_constraints") + ) + + @cached_property + def can_introspect_check_constraints(self): + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 16) + + @cached_property + def has_select_for_update_skip_locked(self): + if self.connection.mysql_is_mariadb: + return self.connection.mysql_version >= (10, 6) + return self.connection.mysql_version >= (8, 0, 1) + + @cached_property + def has_select_for_update_nowait(self): + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 1) + + @cached_property + def has_select_for_update_of(self): + return ( + not self.connection.mysql_is_mariadb + and self.connection.mysql_version >= (8, 0, 1) + ) + + @cached_property + def supports_explain_analyze(self): + return self.connection.mysql_is_mariadb or self.connection.mysql_version >= ( + 8, + 0, + 18, + ) + + @cached_property + def supported_explain_formats(self): + # Alias MySQL's TRADITIONAL to TEXT for consistency with other + # backends. + formats = {"JSON", "TEXT", "TRADITIONAL"} + if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= ( + 8, + 0, + 16, + ): + formats.add("TREE") + return formats + + @cached_property + def supports_transactions(self): + """ + All storage engines except MyISAM support transactions. 
+ """ + return self._mysql_storage_engine != "MyISAM" + + @cached_property + def ignores_table_name_case(self): + return self.connection.mysql_server_data["lower_case_table_names"] + + @cached_property + def supports_default_in_lead_lag(self): + # To be added in https://jira.mariadb.org/browse/MDEV-12981. + return not self.connection.mysql_is_mariadb + + @cached_property + def can_introspect_json_field(self): + if self.connection.mysql_is_mariadb: + return self.can_introspect_check_constraints + return True + + @cached_property + def supports_index_column_ordering(self): + if self._mysql_storage_engine != "InnoDB": + return False + if self.connection.mysql_is_mariadb: + return self.connection.mysql_version >= (10, 8) + return self.connection.mysql_version >= (8, 0, 1) + + @cached_property + def supports_expression_indexes(self): + return ( + not self.connection.mysql_is_mariadb + and self._mysql_storage_engine != "MyISAM" + and self.connection.mysql_version >= (8, 0, 13) + ) + + @cached_property + def supports_select_intersection(self): + is_mariadb = self.connection.mysql_is_mariadb + return is_mariadb or self.connection.mysql_version >= (8, 0, 31) + + supports_select_difference = property( + operator.attrgetter("supports_select_intersection") + ) + + @cached_property + def can_rename_index(self): + if self.connection.mysql_is_mariadb: + return self.connection.mysql_version >= (10, 5, 2) + return True + + @cached_property + def supports_expression_defaults(self): + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 13) diff --git a/testbed/django__django/django/db/backends/mysql/introspection.py b/testbed/django__django/django/db/backends/mysql/introspection.py new file mode 100644 index 0000000000000000000000000000000000000000..a5ebf37112f2ec3eadc24f142c51a86894ccd27f --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/introspection.py @@ -0,0 +1,349 @@ +from collections import namedtuple + +import sqlparse +from MySQLdb.constants import FIELD_TYPE + +from django.db.backends.base.introspection import BaseDatabaseIntrospection +from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo +from django.db.models import Index +from django.utils.datastructures import OrderedSet + +FieldInfo = namedtuple( + "FieldInfo", + BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint", "comment"), +) +InfoLine = namedtuple( + "InfoLine", + "col_name data_type max_len num_prec num_scale extra column_default " + "collation is_unsigned comment", +) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) + + +class DatabaseIntrospection(BaseDatabaseIntrospection): + data_types_reverse = { + FIELD_TYPE.BLOB: "TextField", + FIELD_TYPE.CHAR: "CharField", + FIELD_TYPE.DECIMAL: "DecimalField", + FIELD_TYPE.NEWDECIMAL: "DecimalField", + FIELD_TYPE.DATE: "DateField", + FIELD_TYPE.DATETIME: "DateTimeField", + FIELD_TYPE.DOUBLE: "FloatField", + FIELD_TYPE.FLOAT: "FloatField", + FIELD_TYPE.INT24: "IntegerField", + FIELD_TYPE.JSON: "JSONField", + FIELD_TYPE.LONG: "IntegerField", + FIELD_TYPE.LONGLONG: "BigIntegerField", + FIELD_TYPE.SHORT: "SmallIntegerField", + FIELD_TYPE.STRING: "CharField", + FIELD_TYPE.TIME: "TimeField", + FIELD_TYPE.TIMESTAMP: "DateTimeField", + FIELD_TYPE.TINY: "IntegerField", + FIELD_TYPE.TINY_BLOB: "TextField", + FIELD_TYPE.MEDIUM_BLOB: "TextField", + FIELD_TYPE.LONG_BLOB: "TextField", + 
FIELD_TYPE.VAR_STRING: "CharField", + } + + def get_field_type(self, data_type, description): + field_type = super().get_field_type(data_type, description) + if "auto_increment" in description.extra: + if field_type == "IntegerField": + return "AutoField" + elif field_type == "BigIntegerField": + return "BigAutoField" + elif field_type == "SmallIntegerField": + return "SmallAutoField" + if description.is_unsigned: + if field_type == "BigIntegerField": + return "PositiveBigIntegerField" + elif field_type == "IntegerField": + return "PositiveIntegerField" + elif field_type == "SmallIntegerField": + return "PositiveSmallIntegerField" + # JSON data type is an alias for LONGTEXT in MariaDB, use check + # constraints clauses to introspect JSONField. + if description.has_json_constraint: + return "JSONField" + return field_type + + def get_table_list(self, cursor): + """Return a list of table and view names in the current database.""" + cursor.execute( + """ + SELECT + table_name, + table_type, + table_comment + FROM information_schema.tables + WHERE table_schema = DATABASE() + """ + ) + return [ + TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1]), row[2]) + for row in cursor.fetchall() + ] + + def get_table_description(self, cursor, table_name): + """ + Return a description of the table with the DB-API cursor.description + interface. + """ + json_constraints = {} + if ( + self.connection.mysql_is_mariadb + and self.connection.features.can_introspect_json_field + ): + # JSON data type is an alias for LONGTEXT in MariaDB, select + # JSON_VALID() constraints to introspect JSONField. + cursor.execute( + """ + SELECT c.constraint_name AS column_name + FROM information_schema.check_constraints AS c + WHERE + c.table_name = %s AND + LOWER(c.check_clause) = + 'json_valid(`' + LOWER(c.constraint_name) + '`)' AND + c.constraint_schema = DATABASE() + """, + [table_name], + ) + json_constraints = {row[0] for row in cursor.fetchall()} + # A default collation for the given table.
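+ # e.g. for a table created with COLLATE utf8mb4_general_ci (illustrative),
+ # columns using the table default report collation=None in the query
+ # below; only columns that override it (say utf8mb4_bin) surface a value.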
+ cursor.execute( + """ + SELECT table_collation + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = %s + """, + [table_name], + ) + row = cursor.fetchone() + default_column_collation = row[0] if row else "" + # information_schema database gives more accurate results for some figures: + # - varchar length returned by cursor.description is an internal length, + # not visible length (#5725) + # - precision and scale (for decimal fields) (#5014) + # - auto_increment is not available in cursor.description + cursor.execute( + """ + SELECT + column_name, data_type, character_maximum_length, + numeric_precision, numeric_scale, extra, column_default, + CASE + WHEN collation_name = %s THEN NULL + ELSE collation_name + END AS collation_name, + CASE + WHEN column_type LIKE '%% unsigned' THEN 1 + ELSE 0 + END AS is_unsigned, + column_comment + FROM information_schema.columns + WHERE table_name = %s AND table_schema = DATABASE() + """, + [default_column_collation, table_name], + ) + field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()} + + cursor.execute( + "SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name) + ) + + def to_int(i): + return int(i) if i is not None else i + + fields = [] + for line in cursor.description: + info = field_info[line[0]] + fields.append( + FieldInfo( + *line[:2], + to_int(info.max_len) or line[2], + to_int(info.max_len) or line[3], + to_int(info.num_prec) or line[4], + to_int(info.num_scale) or line[5], + line[6], + info.column_default, + info.collation, + info.extra, + info.is_unsigned, + line[0] in json_constraints, + info.comment, + ) + ) + return fields + + def get_sequences(self, cursor, table_name, table_fields=()): + for field_info in self.get_table_description(cursor, table_name): + if "auto_increment" in field_info.extra: + # MySQL allows only one auto-increment column per table. + return [{"table": table_name, "column": field_info.name}] + return [] + + def get_relations(self, cursor, table_name): + """ + Return a dictionary of {field_name: (field_name_other_table, other_table)} + representing all foreign keys in the given table. + """ + cursor.execute( + """ + SELECT column_name, referenced_column_name, referenced_table_name + FROM information_schema.key_column_usage + WHERE table_name = %s + AND table_schema = DATABASE() + AND referenced_table_name IS NOT NULL + AND referenced_column_name IS NOT NULL + """, + [table_name], + ) + return { + field_name: (other_field, other_table) + for field_name, other_field, other_table in cursor.fetchall() + } + + def get_storage_engine(self, cursor, table_name): + """ + Retrieve the storage engine for a given table. Return the default + storage engine if the table doesn't exist. 
+ """ + cursor.execute( + """ + SELECT engine + FROM information_schema.tables + WHERE + table_name = %s AND + table_schema = DATABASE() + """, + [table_name], + ) + result = cursor.fetchone() + if not result: + return self.connection.features._mysql_storage_engine + return result[0] + + def _parse_constraint_columns(self, check_clause, columns): + check_columns = OrderedSet() + statement = sqlparse.parse(check_clause)[0] + tokens = (token for token in statement.flatten() if not token.is_whitespace) + for token in tokens: + if ( + token.ttype == sqlparse.tokens.Name + and self.connection.ops.quote_name(token.value) == token.value + and token.value[1:-1] in columns + ): + check_columns.add(token.value[1:-1]) + return check_columns + + def get_constraints(self, cursor, table_name): + """ + Retrieve any constraints or keys (unique, pk, fk, check, index) across + one or more columns. + """ + constraints = {} + # Get the actual constraint names and columns + name_query = """ + SELECT kc.`constraint_name`, kc.`column_name`, + kc.`referenced_table_name`, kc.`referenced_column_name`, + c.`constraint_type` + FROM + information_schema.key_column_usage AS kc, + information_schema.table_constraints AS c + WHERE + kc.table_schema = DATABASE() AND + c.table_schema = kc.table_schema AND + c.constraint_name = kc.constraint_name AND + c.constraint_type != 'CHECK' AND + kc.table_name = %s + ORDER BY kc.`ordinal_position` + """ + cursor.execute(name_query, [table_name]) + for constraint, column, ref_table, ref_column, kind in cursor.fetchall(): + if constraint not in constraints: + constraints[constraint] = { + "columns": OrderedSet(), + "primary_key": kind == "PRIMARY KEY", + "unique": kind in {"PRIMARY KEY", "UNIQUE"}, + "index": False, + "check": False, + "foreign_key": (ref_table, ref_column) if ref_column else None, + } + if self.connection.features.supports_index_column_ordering: + constraints[constraint]["orders"] = [] + constraints[constraint]["columns"].add(column) + # Add check constraints. + if self.connection.features.can_introspect_check_constraints: + unnamed_constraints_index = 0 + columns = { + info.name for info in self.get_table_description(cursor, table_name) + } + if self.connection.mysql_is_mariadb: + type_query = """ + SELECT c.constraint_name, c.check_clause + FROM information_schema.check_constraints AS c + WHERE + c.constraint_schema = DATABASE() AND + c.table_name = %s + """ + else: + type_query = """ + SELECT cc.constraint_name, cc.check_clause + FROM + information_schema.check_constraints AS cc, + information_schema.table_constraints AS tc + WHERE + cc.constraint_schema = DATABASE() AND + tc.table_schema = cc.constraint_schema AND + cc.constraint_name = tc.constraint_name AND + tc.constraint_type = 'CHECK' AND + tc.table_name = %s + """ + cursor.execute(type_query, [table_name]) + for constraint, check_clause in cursor.fetchall(): + constraint_columns = self._parse_constraint_columns( + check_clause, columns + ) + # Ensure uniqueness of unnamed constraints. Unnamed unique + # and check columns constraints have the same name as + # a column. 
+ if set(constraint_columns) == {constraint}: + unnamed_constraints_index += 1 + constraint = "__unnamed_constraint_%s__" % unnamed_constraints_index + constraints[constraint] = { + "columns": constraint_columns, + "primary_key": False, + "unique": False, + "index": False, + "check": True, + "foreign_key": None, + } + # Now add in the indexes + cursor.execute( + "SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name) + ) + for table, non_unique, index, colseq, column, order, type_ in [ + x[:6] + (x[10],) for x in cursor.fetchall() + ]: + if index not in constraints: + constraints[index] = { + "columns": OrderedSet(), + "primary_key": False, + "unique": not non_unique, + "check": False, + "foreign_key": None, + } + if self.connection.features.supports_index_column_ordering: + constraints[index]["orders"] = [] + constraints[index]["index"] = True + constraints[index]["type"] = ( + Index.suffix if type_ == "BTREE" else type_.lower() + ) + constraints[index]["columns"].add(column) + if self.connection.features.supports_index_column_ordering: + constraints[index]["orders"].append("DESC" if order == "D" else "ASC") + # Convert the sorted sets to lists + for constraint in constraints.values(): + constraint["columns"] = list(constraint["columns"]) + return constraints diff --git a/testbed/django__django/django/db/backends/mysql/operations.py b/testbed/django__django/django/db/backends/mysql/operations.py new file mode 100644 index 0000000000000000000000000000000000000000..76ca8157656c50168b69331757b73c89e6cc5d73 --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/operations.py @@ -0,0 +1,464 @@ +import uuid + +from django.conf import settings +from django.db.backends.base.operations import BaseDatabaseOperations +from django.db.backends.utils import split_tzname_delta +from django.db.models import Exists, ExpressionWrapper, Lookup +from django.db.models.constants import OnConflict +from django.utils import timezone +from django.utils.encoding import force_str +from django.utils.regex_helper import _lazy_re_compile + + +class DatabaseOperations(BaseDatabaseOperations): + compiler_module = "django.db.backends.mysql.compiler" + + # MySQL stores positive fields as UNSIGNED ints. + integer_field_ranges = { + **BaseDatabaseOperations.integer_field_ranges, + "PositiveSmallIntegerField": (0, 65535), + "PositiveIntegerField": (0, 4294967295), + "PositiveBigIntegerField": (0, 18446744073709551615), + } + cast_data_types = { + "AutoField": "signed integer", + "BigAutoField": "signed integer", + "SmallAutoField": "signed integer", + "CharField": "char(%(max_length)s)", + "DecimalField": "decimal(%(max_digits)s, %(decimal_places)s)", + "TextField": "char", + "IntegerField": "signed integer", + "BigIntegerField": "signed integer", + "SmallIntegerField": "signed integer", + "PositiveBigIntegerField": "unsigned integer", + "PositiveIntegerField": "unsigned integer", + "PositiveSmallIntegerField": "unsigned integer", + "DurationField": "signed integer", + } + cast_char_field_without_max_length = "char" + explain_prefix = "EXPLAIN" + + # EXTRACT format cannot be passed in parameters. + _extract_format_re = _lazy_re_compile(r"[A-Z_]+") + + def date_extract_sql(self, lookup_type, sql, params): + # https://dev.mysql.com/doc/mysql/en/date-and-time-functions.html + if lookup_type == "week_day": + # DAYOFWEEK() returns an integer, 1-7, Sunday=1. + return f"DAYOFWEEK({sql})", params + elif lookup_type == "iso_week_day": + # WEEKDAY() returns an integer, 0-6, Monday=0. 
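+ # e.g. for a Wednesday, DAYOFWEEK() returns 4 and WEEKDAY() returns 2,
+ # so WEEKDAY() + 1 yields 3, matching ISO 8601 (Monday=1 .. Sunday=7).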
+ return f"WEEKDAY({sql}) + 1", params + elif lookup_type == "week": + # Override the value of default_week_format for consistency with + # other database backends. + # Mode 3: Monday, 1-53, with 4 or more days this year. + return f"WEEK({sql}, 3)", params + elif lookup_type == "iso_year": + # Get the year part from the YEARWEEK function, which returns a + # number as year * 100 + week. + return f"TRUNCATE(YEARWEEK({sql}, 3), -2) / 100", params + else: + # EXTRACT returns 1-53 based on ISO-8601 for the week number. + lookup_type = lookup_type.upper() + if not self._extract_format_re.fullmatch(lookup_type): + raise ValueError(f"Invalid loookup type: {lookup_type!r}") + return f"EXTRACT({lookup_type} FROM {sql})", params + + def date_trunc_sql(self, lookup_type, sql, params, tzname=None): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + fields = { + "year": "%Y-01-01", + "month": "%Y-%m-01", + } + if lookup_type in fields: + format_str = fields[lookup_type] + return f"CAST(DATE_FORMAT({sql}, %s) AS DATE)", (*params, format_str) + elif lookup_type == "quarter": + return ( + f"MAKEDATE(YEAR({sql}), 1) + " + f"INTERVAL QUARTER({sql}) QUARTER - INTERVAL 1 QUARTER", + (*params, *params), + ) + elif lookup_type == "week": + return f"DATE_SUB({sql}, INTERVAL WEEKDAY({sql}) DAY)", (*params, *params) + else: + return f"DATE({sql})", params + + def _prepare_tzname_delta(self, tzname): + tzname, sign, offset = split_tzname_delta(tzname) + return f"{sign}{offset}" if offset else tzname + + def _convert_sql_to_tz(self, sql, params, tzname): + if tzname and settings.USE_TZ and self.connection.timezone_name != tzname: + return f"CONVERT_TZ({sql}, %s, %s)", ( + *params, + self.connection.timezone_name, + self._prepare_tzname_delta(tzname), + ) + return sql, params + + def datetime_cast_date_sql(self, sql, params, tzname): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + return f"DATE({sql})", params + + def datetime_cast_time_sql(self, sql, params, tzname): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + return f"TIME({sql})", params + + def datetime_extract_sql(self, lookup_type, sql, params, tzname): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + return self.date_extract_sql(lookup_type, sql, params) + + def datetime_trunc_sql(self, lookup_type, sql, params, tzname): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + fields = ["year", "month", "day", "hour", "minute", "second"] + format = ("%Y-", "%m", "-%d", " %H:", "%i", ":%s") + format_def = ("0000-", "01", "-01", " 00:", "00", ":00") + if lookup_type == "quarter": + return ( + f"CAST(DATE_FORMAT(MAKEDATE(YEAR({sql}), 1) + " + f"INTERVAL QUARTER({sql}) QUARTER - " + f"INTERVAL 1 QUARTER, %s) AS DATETIME)" + ), (*params, *params, "%Y-%m-01 00:00:00") + if lookup_type == "week": + return ( + f"CAST(DATE_FORMAT(" + f"DATE_SUB({sql}, INTERVAL WEEKDAY({sql}) DAY), %s) AS DATETIME)" + ), (*params, *params, "%Y-%m-%d 00:00:00") + try: + i = fields.index(lookup_type) + 1 + except ValueError: + pass + else: + format_str = "".join(format[:i] + format_def[i:]) + return f"CAST(DATE_FORMAT({sql}, %s) AS DATETIME)", (*params, format_str) + return sql, params + + def time_trunc_sql(self, lookup_type, sql, params, tzname=None): + sql, params = self._convert_sql_to_tz(sql, params, tzname) + fields = { + "hour": "%H:00:00", + "minute": "%H:%i:00", + "second": "%H:%i:%s", + } + if lookup_type in fields: + format_str = fields[lookup_type] + return f"CAST(DATE_FORMAT({sql}, %s) AS TIME)", (*params, 
format_str) + else: + return f"TIME({sql})", params + + def fetch_returned_insert_rows(self, cursor): + """ + Given a cursor object that has just performed an INSERT...RETURNING + statement into a table, return the tuple of returned data. + """ + return cursor.fetchall() + + def format_for_duration_arithmetic(self, sql): + return "INTERVAL %s MICROSECOND" % sql + + def force_no_ordering(self): + """ + "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped + columns. If no ordering would otherwise be applied, we don't want any + implicit sorting going on. + """ + return [(None, ("NULL", [], False))] + + def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None): + return value + + def last_executed_query(self, cursor, sql, params): + # With MySQLdb, cursor objects have an (undocumented) "_executed" + # attribute where the exact query sent to the database is saved. + # See MySQLdb/cursors.py in the source distribution. + # MySQLdb returns string, PyMySQL bytes. + return force_str(getattr(cursor, "_executed", None), errors="replace") + + def no_limit_value(self): + # 2**64 - 1, as recommended by the MySQL documentation + return 18446744073709551615 + + def quote_name(self, name): + if name.startswith("`") and name.endswith("`"): + return name # Quoting once is enough. + return "`%s`" % name + + def return_insert_columns(self, fields): + # MySQL and MariaDB < 10.5.0 don't support an INSERT...RETURNING + # statement. + if not fields: + return "", () + columns = [ + "%s.%s" + % ( + self.quote_name(field.model._meta.db_table), + self.quote_name(field.column), + ) + for field in fields + ] + return "RETURNING %s" % ", ".join(columns), () + + def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): + if not tables: + return [] + + sql = ["SET FOREIGN_KEY_CHECKS = 0;"] + if reset_sequences: + # It's faster to TRUNCATE tables that require a sequence reset + # since ALTER TABLE AUTO_INCREMENT is slower than TRUNCATE. + sql.extend( + "%s %s;" + % ( + style.SQL_KEYWORD("TRUNCATE"), + style.SQL_FIELD(self.quote_name(table_name)), + ) + for table_name in tables + ) + else: + # Otherwise issue a simple DELETE since it's faster than TRUNCATE + # and preserves sequences. + sql.extend( + "%s %s %s;" + % ( + style.SQL_KEYWORD("DELETE"), + style.SQL_KEYWORD("FROM"), + style.SQL_FIELD(self.quote_name(table_name)), + ) + for table_name in tables + ) + sql.append("SET FOREIGN_KEY_CHECKS = 1;") + return sql + + def sequence_reset_by_name_sql(self, style, sequences): + return [ + "%s %s %s %s = 1;" + % ( + style.SQL_KEYWORD("ALTER"), + style.SQL_KEYWORD("TABLE"), + style.SQL_FIELD(self.quote_name(sequence_info["table"])), + style.SQL_FIELD("AUTO_INCREMENT"), + ) + for sequence_info in sequences + ] + + def validate_autopk_value(self, value): + # Zero in AUTO_INCREMENT field does not work without the + # NO_AUTO_VALUE_ON_ZERO SQL mode. + if value == 0 and not self.connection.features.allows_auto_pk_0: + raise ValueError( + "The database backend does not accept 0 as a value for AutoField." + ) + return value + + def adapt_datetimefield_value(self, value): + if value is None: + return None + + # Expression values are adapted by the database. 
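+ # For the non-expression case, an illustrative round trip assuming
+ # USE_TZ = True and a UTC connection time zone:
+ # datetime(2024, 5, 1, 12, 0, tzinfo=timezone.utc) is made naive and
+ # sent as "2024-05-01 12:00:00", since MySQL DATETIME stores no offset.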
+ if hasattr(value, "resolve_expression"): + return value + + # MySQL doesn't support tz-aware datetimes + if timezone.is_aware(value): + if settings.USE_TZ: + value = timezone.make_naive(value, self.connection.timezone) + else: + raise ValueError( + "MySQL backend does not support timezone-aware datetimes when " + "USE_TZ is False." + ) + return str(value) + + def adapt_timefield_value(self, value): + if value is None: + return None + + # Expression values are adapted by the database. + if hasattr(value, "resolve_expression"): + return value + + # MySQL doesn't support tz-aware times + if timezone.is_aware(value): + raise ValueError("MySQL backend does not support timezone-aware times.") + + return value.isoformat(timespec="microseconds") + + def max_name_length(self): + return 64 + + def pk_default_value(self): + return "NULL" + + def bulk_insert_sql(self, fields, placeholder_rows): + placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) + values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) + return "VALUES " + values_sql + + def combine_expression(self, connector, sub_expressions): + if connector == "^": + return "POW(%s)" % ",".join(sub_expressions) + # Convert the result to a signed integer since MySQL's binary operators + # return an unsigned integer. + elif connector in ("&", "|", "<<", "#"): + connector = "^" if connector == "#" else connector + return "CONVERT(%s, SIGNED)" % connector.join(sub_expressions) + elif connector == ">>": + lhs, rhs = sub_expressions + return "FLOOR(%(lhs)s / POW(2, %(rhs)s))" % {"lhs": lhs, "rhs": rhs} + return super().combine_expression(connector, sub_expressions) + + def get_db_converters(self, expression): + converters = super().get_db_converters(expression) + internal_type = expression.output_field.get_internal_type() + if internal_type == "BooleanField": + converters.append(self.convert_booleanfield_value) + elif internal_type == "DateTimeField": + if settings.USE_TZ: + converters.append(self.convert_datetimefield_value) + elif internal_type == "UUIDField": + converters.append(self.convert_uuidfield_value) + return converters + + def convert_booleanfield_value(self, value, expression, connection): + if value in (0, 1): + value = bool(value) + return value + + def convert_datetimefield_value(self, value, expression, connection): + if value is not None: + value = timezone.make_aware(value, self.connection.timezone) + return value + + def convert_uuidfield_value(self, value, expression, connection): + if value is not None: + value = uuid.UUID(value) + return value + + def binary_placeholder_sql(self, value): + return ( + "_binary %s" if value is not None and not hasattr(value, "as_sql") else "%s" + ) + + def subtract_temporals(self, internal_type, lhs, rhs): + lhs_sql, lhs_params = lhs + rhs_sql, rhs_params = rhs + if internal_type == "TimeField": + if self.connection.mysql_is_mariadb: + # MariaDB includes the microsecond component in TIME_TO_SEC as + # a decimal. MySQL returns an integer without microseconds. 
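+ # e.g. (illustrative, per the comment above): TIME_TO_SEC('00:00:01.500000')
+ # yields 1.500000 on MariaDB but 1 on MySQL, so the MySQL branch below
+ # re-adds the microsecond component via MICROSECOND() explicitly.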
+ return ( + "CAST((TIME_TO_SEC(%(lhs)s) - TIME_TO_SEC(%(rhs)s)) " + "* 1000000 AS SIGNED)" + ) % { + "lhs": lhs_sql, + "rhs": rhs_sql, + }, ( + *lhs_params, + *rhs_params, + ) + return ( + "((TIME_TO_SEC(%(lhs)s) * 1000000 + MICROSECOND(%(lhs)s)) -" + " (TIME_TO_SEC(%(rhs)s) * 1000000 + MICROSECOND(%(rhs)s)))" + ) % {"lhs": lhs_sql, "rhs": rhs_sql}, tuple(lhs_params) * 2 + tuple( + rhs_params + ) * 2 + params = (*rhs_params, *lhs_params) + return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), params + + def explain_query_prefix(self, format=None, **options): + # Alias MySQL's TRADITIONAL to TEXT for consistency with other backends. + if format and format.upper() == "TEXT": + format = "TRADITIONAL" + elif ( + not format and "TREE" in self.connection.features.supported_explain_formats + ): + # Use TREE by default (if supported) as it's more informative. + format = "TREE" + analyze = options.pop("analyze", False) + prefix = super().explain_query_prefix(format, **options) + if analyze and self.connection.features.supports_explain_analyze: + # MariaDB uses ANALYZE instead of EXPLAIN ANALYZE. + prefix = ( + "ANALYZE" if self.connection.mysql_is_mariadb else prefix + " ANALYZE" + ) + if format and not (analyze and not self.connection.mysql_is_mariadb): + # Only MariaDB supports the analyze option with formats. + prefix += " FORMAT=%s" % format + return prefix + + def regex_lookup(self, lookup_type): + # REGEXP_LIKE doesn't exist in MariaDB. + if self.connection.mysql_is_mariadb: + if lookup_type == "regex": + return "%s REGEXP BINARY %s" + return "%s REGEXP %s" + + match_option = "c" if lookup_type == "regex" else "i" + return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option + + def insert_statement(self, on_conflict=None): + if on_conflict == OnConflict.IGNORE: + return "INSERT IGNORE INTO" + return super().insert_statement(on_conflict=on_conflict) + + def lookup_cast(self, lookup_type, internal_type=None): + lookup = "%s" + if internal_type == "JSONField": + if self.connection.mysql_is_mariadb or lookup_type in ( + "iexact", + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + "regex", + "iregex", + ): + lookup = "JSON_UNQUOTE(%s)" + return lookup + + def conditional_expression_supported_in_where_clause(self, expression): + # MySQL ignores indexes with boolean fields unless they're compared + # directly to a boolean value. + if isinstance(expression, (Exists, Lookup)): + return True + if isinstance(expression, ExpressionWrapper) and expression.conditional: + return self.conditional_expression_supported_in_where_clause( + expression.expression + ) + if getattr(expression, "conditional", False): + return False + return super().conditional_expression_supported_in_where_clause(expression) + + def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields): + if on_conflict == OnConflict.UPDATE: + conflict_suffix_sql = "ON DUPLICATE KEY UPDATE %(fields)s" + # The use of VALUES() is deprecated in MySQL 8.0.20+. Instead, use + # aliases for the new row and its columns available in MySQL + # 8.0.19+. + if not self.connection.mysql_is_mariadb: + if self.connection.mysql_version >= (8, 0, 19): + conflict_suffix_sql = f"AS new {conflict_suffix_sql}" + field_sql = "%(field)s = new.%(field)s" + else: + field_sql = "%(field)s = VALUES(%(field)s)" + # Use VALUE() on MariaDB. 
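+ # Illustration: with update_fields ["name"], the suffix is
+ # "AS new ON DUPLICATE KEY UPDATE `name` = new.`name`" on MySQL >=
+ # 8.0.19, "ON DUPLICATE KEY UPDATE `name` = VALUES(`name`)" on older
+ # MySQL, and "ON DUPLICATE KEY UPDATE `name` = VALUE(`name`)" on
+ # MariaDB.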
+ else: + field_sql = "%(field)s = VALUE(%(field)s)" + + fields = ", ".join( + [ + field_sql % {"field": field} + for field in map(self.quote_name, update_fields) + ] + ) + return conflict_suffix_sql % {"fields": fields} + return super().on_conflict_suffix_sql( + fields, + on_conflict, + update_fields, + unique_fields, + ) diff --git a/testbed/django__django/django/db/backends/mysql/schema.py b/testbed/django__django/django/db/backends/mysql/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..bfe5a2e8051d485e7c52113a9666b59aa3c7bcda --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/schema.py @@ -0,0 +1,273 @@ +from django.db.backends.base.schema import BaseDatabaseSchemaEditor +from django.db.models import NOT_PROVIDED, F, UniqueConstraint +from django.db.models.constants import LOOKUP_SEP + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s" + + sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL" + sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s%(comment)s" + sql_alter_column_no_default_null = "ALTER COLUMN %(column)s SET DEFAULT NULL" + + # No 'CASCADE' which works as a no-op in MySQL but is undocumented + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" + + sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s" + sql_create_column_inline_fk = ( + ", ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " + "REFERENCES %(to_table)s(%(to_column)s)" + ) + sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s" + + sql_delete_index = "DROP INDEX %(name)s ON %(table)s" + sql_rename_index = "ALTER TABLE %(table)s RENAME INDEX %(old_name)s TO %(new_name)s" + + sql_create_pk = ( + "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + ) + sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" + + sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s" + + sql_alter_table_comment = "ALTER TABLE %(table)s COMMENT = %(comment)s" + sql_alter_column_comment = None + + @property + def sql_delete_check(self): + if self.connection.mysql_is_mariadb: + # The name of the column check constraint is the same as the field + # name on MariaDB. Adding IF EXISTS clause prevents migrations + # crash. Constraint is removed during a "MODIFY" column statement. + return "ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(name)s" + return "ALTER TABLE %(table)s DROP CHECK %(name)s" + + @property + def sql_rename_column(self): + # MariaDB >= 10.5.2 and MySQL >= 8.0.4 support an + # "ALTER TABLE ... RENAME COLUMN" statement. + if self.connection.mysql_is_mariadb: + if self.connection.mysql_version >= (10, 5, 2): + return super().sql_rename_column + elif self.connection.mysql_version >= (8, 0, 4): + return super().sql_rename_column + return "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s" + + def quote_value(self, value): + self.connection.ensure_connection() + # MySQLdb escapes to string, PyMySQL to bytes. 
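+ # (hence the decode() below when escaping a str yields bytes.)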
+ quoted = self.connection.connection.escape( + value, self.connection.connection.encoders + ) + if isinstance(value, str) and isinstance(quoted, bytes): + quoted = quoted.decode() + return quoted + + def _is_limited_data_type(self, field): + db_type = field.db_type(self.connection) + return ( + db_type is not None + and db_type.lower() in self.connection._limited_data_types + ) + + def skip_default(self, field): + if not self._supports_limited_data_type_defaults: + return self._is_limited_data_type(field) + return False + + def skip_default_on_alter(self, field): + if self._is_limited_data_type(field) and not self.connection.mysql_is_mariadb: + # MySQL doesn't support defaults for BLOB and TEXT in the + # ALTER COLUMN statement. + return True + return False + + @property + def _supports_limited_data_type_defaults(self): + # MariaDB and MySQL >= 8.0.13 support defaults for BLOB and TEXT. + if self.connection.mysql_is_mariadb: + return True + return self.connection.mysql_version >= (8, 0, 13) + + def _column_default_sql(self, field): + if ( + not self.connection.mysql_is_mariadb + and self._supports_limited_data_type_defaults + and self._is_limited_data_type(field) + ): + # MySQL supports defaults for BLOB and TEXT columns only if the + # default value is written as an expression i.e. in parentheses. + return "(%s)" + return super()._column_default_sql(field) + + def add_field(self, model, field): + super().add_field(model, field) + + # Simulate the effect of a one-off default. + # field.default may be unhashable, so a set isn't used for "in" check. + if self.skip_default(field) and field.default not in (None, NOT_PROVIDED): + effective_default = self.effective_default(field) + self.execute( + "UPDATE %(table)s SET %(column)s = %%s" + % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + }, + [effective_default], + ) + + def remove_constraint(self, model, constraint): + if ( + isinstance(constraint, UniqueConstraint) + and constraint.create_sql(model, self) is not None + ): + self._create_missing_fk_index( + model, + fields=constraint.fields, + expressions=constraint.expressions, + ) + super().remove_constraint(model, constraint) + + def remove_index(self, model, index): + self._create_missing_fk_index( + model, + fields=[field_name for field_name, _ in index.fields_orders], + expressions=index.expressions, + ) + super().remove_index(model, index) + + def _field_should_be_indexed(self, model, field): + if not super()._field_should_be_indexed(model, field): + return False + + storage = self.connection.introspection.get_storage_engine( + self.connection.cursor(), model._meta.db_table + ) + # No need to create an index for ForeignKey fields except if + # db_constraint=False because the index from that constraint won't be + # created. + if ( + storage == "InnoDB" + and field.get_internal_type() == "ForeignKey" + and field.db_constraint + ): + return False + return not self._is_limited_data_type(field) + + def _create_missing_fk_index( + self, + model, + *, + fields, + expressions=None, + ): + """ + MySQL can remove an implicit FK index on a field when that field is + covered by another index like a unique_together. "covered" here means + that the more complex index has the FK field as its first field (see + https://bugs.mysql.com/bug.php?id=37910). + + Manually create an implicit FK index to make it possible to remove the + composed index. 
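+
+ For example, dropping a unique_together ("author", "title") where
+ "author" is a ForeignKey would otherwise drop the only index covering
+ the FK column, so an equivalent single-column index is created first.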
+ """ + first_field_name = None + if fields: + first_field_name = fields[0] + elif ( + expressions + and self.connection.features.supports_expression_indexes + and isinstance(expressions[0], F) + and LOOKUP_SEP not in expressions[0].name + ): + first_field_name = expressions[0].name + + if not first_field_name: + return + + first_field = model._meta.get_field(first_field_name) + if first_field.get_internal_type() == "ForeignKey": + column = self.connection.introspection.identifier_converter( + first_field.column + ) + with self.connection.cursor() as cursor: + constraint_names = [ + name + for name, infodict in self.connection.introspection.get_constraints( + cursor, model._meta.db_table + ).items() + if infodict["index"] and infodict["columns"][0] == column + ] + # There are no other indexes that starts with the FK field, only + # the index that is expected to be deleted. + if len(constraint_names) == 1: + self.execute( + self._create_index_sql(model, fields=[first_field], suffix="") + ) + + def _delete_composed_index(self, model, fields, *args): + self._create_missing_fk_index(model, fields=fields) + return super()._delete_composed_index(model, fields, *args) + + def _set_field_new_type(self, field, new_type): + """ + Keep the NULL and DEFAULT properties of the old field. If it has + changed, it will be handled separately. + """ + if field.db_default is not NOT_PROVIDED: + default_sql, params = self.db_default_sql(field) + default_sql %= tuple(self.quote_value(p) for p in params) + new_type += f" DEFAULT {default_sql}" + if field.null: + new_type += " NULL" + else: + new_type += " NOT NULL" + return new_type + + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): + new_type = self._set_field_new_type(old_field, new_type) + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + + def _field_db_check(self, field, field_db_params): + if self.connection.mysql_is_mariadb and self.connection.mysql_version >= ( + 10, + 5, + 2, + ): + return super()._field_db_check(field, field_db_params) + # On MySQL and MariaDB < 10.5.2 (no support for + # "ALTER TABLE ... RENAME COLUMN" statements), check constraints with + # the column name as it requires explicit recreation when the column is + # renamed. + return field_db_params["check"] + + def _rename_field_sql(self, table, old_field, new_field, new_type): + new_type = self._set_field_new_type(old_field, new_type) + return super()._rename_field_sql(table, old_field, new_field, new_type) + + def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): + # Comment is alter when altering the column type. 
+ return "", [] + + def _comment_sql(self, comment): + comment_sql = super()._comment_sql(comment) + return f" COMMENT {comment_sql}" + + def _alter_column_null_sql(self, model, old_field, new_field): + if new_field.db_default is NOT_PROVIDED: + return super()._alter_column_null_sql(model, old_field, new_field) + + new_db_params = new_field.db_parameters(connection=self.connection) + type_sql = self._set_field_new_type(new_field, new_db_params["type"]) + return ( + "MODIFY %(column)s %(type)s" + % { + "column": self.quote_name(new_field.column), + "type": type_sql, + }, + [], + ) diff --git a/testbed/django__django/django/db/backends/mysql/validation.py b/testbed/django__django/django/db/backends/mysql/validation.py new file mode 100644 index 0000000000000000000000000000000000000000..fdc3809c12ddbbd6c809f3701925167734ae4bee --- /dev/null +++ b/testbed/django__django/django/db/backends/mysql/validation.py @@ -0,0 +1,77 @@ +from django.core import checks +from django.db.backends.base.validation import BaseDatabaseValidation +from django.utils.version import get_docs_version + + +class DatabaseValidation(BaseDatabaseValidation): + def check(self, **kwargs): + issues = super().check(**kwargs) + issues.extend(self._check_sql_mode(**kwargs)) + return issues + + def _check_sql_mode(self, **kwargs): + if not ( + self.connection.sql_mode & {"STRICT_TRANS_TABLES", "STRICT_ALL_TABLES"} + ): + return [ + checks.Warning( + "%s Strict Mode is not set for database connection '%s'" + % (self.connection.display_name, self.connection.alias), + hint=( + "%s's Strict Mode fixes many data integrity problems in " + "%s, such as data truncation upon insertion, by " + "escalating warnings into errors. It is strongly " + "recommended you activate it. See: " + "https://docs.djangoproject.com/en/%s/ref/databases/" + "#mysql-sql-mode" + % ( + self.connection.display_name, + self.connection.display_name, + get_docs_version(), + ), + ), + id="mysql.W002", + ) + ] + return [] + + def check_field_type(self, field, field_type): + """ + MySQL has the following field length restriction: + No character (varchar) fields can have a length exceeding 255 + characters if they have a unique index on them. + MySQL doesn't support a database index on some data types. + """ + errors = [] + if ( + field_type.startswith("varchar") + and field.unique + and (field.max_length is None or int(field.max_length) > 255) + ): + errors.append( + checks.Warning( + "%s may not allow unique CharFields to have a max_length " + "> 255." % self.connection.display_name, + obj=field, + hint=( + "See: https://docs.djangoproject.com/en/%s/ref/" + "databases/#mysql-character-fields" % get_docs_version() + ), + id="mysql.W003", + ) + ) + + if field.db_index and field_type.lower() in self.connection._limited_data_types: + errors.append( + checks.Warning( + "%s does not support a database index on %s columns." + % (self.connection.display_name, field_type), + hint=( + "An index won't be created. Silence this warning if " + "you don't care about it." 
+ ), + obj=field, + id="fields.W162", + ) + ) + return errors diff --git a/testbed/django__django/django/db/backends/oracle/__init__.py b/testbed/django__django/django/db/backends/oracle/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/db/backends/oracle/base.py b/testbed/django__django/django/db/backends/oracle/base.py new file mode 100644 index 0000000000000000000000000000000000000000..845ab8ccf5ac7feba6a95428e026c8e2e4930d89 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/base.py @@ -0,0 +1,592 @@ +""" +Oracle database backend for Django. + +Requires cx_Oracle: https://oracle.github.io/python-cx_Oracle/ +""" +import datetime +import decimal +import os +import platform +from contextlib import contextmanager + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.db import IntegrityError +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.utils import debug_transaction +from django.utils.asyncio import async_unsafe +from django.utils.encoding import force_bytes, force_str +from django.utils.functional import cached_property + + +def _setup_environment(environ): + # Cygwin requires some special voodoo to set the environment variables + # properly so that Oracle will see them. + if platform.system().upper().startswith("CYGWIN"): + try: + import ctypes + except ImportError as e: + raise ImproperlyConfigured( + "Error loading ctypes: %s; " + "the Oracle backend requires ctypes to " + "operate correctly under Cygwin." % e + ) + kernel32 = ctypes.CDLL("kernel32") + for name, value in environ: + kernel32.SetEnvironmentVariableA(name, value) + else: + os.environ.update(environ) + + +_setup_environment( + [ + # Oracle takes client-side character set encoding from the environment. + ("NLS_LANG", ".AL32UTF8"), + # This prevents Unicode from getting mangled by getting encoded into the + # potentially non-Unicode database character set. + ("ORA_NCHAR_LITERAL_REPLACE", "TRUE"), + ] +) + + +try: + import cx_Oracle as Database +except ImportError as e: + raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e) + +# Some of these import cx_Oracle, so import them after checking if it's installed. +from .client import DatabaseClient # NOQA +from .creation import DatabaseCreation # NOQA +from .features import DatabaseFeatures # NOQA +from .introspection import DatabaseIntrospection # NOQA +from .operations import DatabaseOperations # NOQA +from .schema import DatabaseSchemaEditor # NOQA +from .utils import Oracle_datetime, dsn # NOQA +from .validation import DatabaseValidation # NOQA + + +@contextmanager +def wrap_oracle_errors(): + try: + yield + except Database.DatabaseError as e: + # cx_Oracle raises a cx_Oracle.DatabaseError exception with the + # following attributes and values: + # code = 2091 + # message = 'ORA-02091: transaction rolled back + # 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS + # _C00102056) violated - parent key not found' + # or: + # 'ORA-00001: unique constraint (DJANGOTEST.DEFERRABLE_ + # PINK_CONSTRAINT) violated + # Convert that case to Django's IntegrityError exception. 
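+ # (e.args[0] is cx_Oracle's error object, carrying the .code and
+ # .message attributes checked below.)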
+ x = e.args[0]
+ if (
+ hasattr(x, "code")
+ and hasattr(x, "message")
+ and x.code == 2091
+ and ("ORA-02291" in x.message or "ORA-00001" in x.message)
+ ):
+ raise IntegrityError(*tuple(e.args))
+ raise
+
+
+class _UninitializedOperatorsDescriptor:
+ def __get__(self, instance, cls=None):
+ # If connection.operators is looked up before a connection has been
+ # created, transparently initialize connection.operators to avert an
+ # AttributeError.
+ if instance is None:
+ raise AttributeError("operators not available as class attribute")
+ # Creating a cursor will initialize the operators.
+ instance.cursor().close()
+ return instance.__dict__["operators"]
+
+
+class DatabaseWrapper(BaseDatabaseWrapper):
+ vendor = "oracle"
+ display_name = "Oracle"
+ # This dictionary maps Field objects to their associated Oracle column
+ # types, as strings. Column-type strings can contain format strings; they'll
+ # be interpolated against the values of Field.__dict__ before being output.
+ # If a column type is set to None, it won't be included in the output.
+ #
+ # Any format strings starting with "qn_" are quoted before being used in the
+ # output (the "qn_" prefix is stripped before the lookup is performed).
+ data_types = {
+ "AutoField": "NUMBER(11) GENERATED BY DEFAULT ON NULL AS IDENTITY",
+ "BigAutoField": "NUMBER(19) GENERATED BY DEFAULT ON NULL AS IDENTITY",
+ "BinaryField": "BLOB",
+ "BooleanField": "NUMBER(1)",
+ "CharField": "NVARCHAR2(%(max_length)s)",
+ "DateField": "DATE",
+ "DateTimeField": "TIMESTAMP",
+ "DecimalField": "NUMBER(%(max_digits)s, %(decimal_places)s)",
+ "DurationField": "INTERVAL DAY(9) TO SECOND(6)",
+ "FileField": "NVARCHAR2(%(max_length)s)",
+ "FilePathField": "NVARCHAR2(%(max_length)s)",
+ "FloatField": "DOUBLE PRECISION",
+ "IntegerField": "NUMBER(11)",
+ "JSONField": "NCLOB",
+ "BigIntegerField": "NUMBER(19)",
+ "IPAddressField": "VARCHAR2(15)",
+ "GenericIPAddressField": "VARCHAR2(39)",
+ "OneToOneField": "NUMBER(11)",
+ "PositiveBigIntegerField": "NUMBER(19)",
+ "PositiveIntegerField": "NUMBER(11)",
+ "PositiveSmallIntegerField": "NUMBER(11)",
+ "SlugField": "NVARCHAR2(%(max_length)s)",
+ "SmallAutoField": "NUMBER(5) GENERATED BY DEFAULT ON NULL AS IDENTITY",
+ "SmallIntegerField": "NUMBER(11)",
+ "TextField": "NCLOB",
+ "TimeField": "TIMESTAMP",
+ "URLField": "VARCHAR2(%(max_length)s)",
+ "UUIDField": "VARCHAR2(32)",
+ }
+ data_type_check_constraints = {
+ "BooleanField": "%(qn_column)s IN (0,1)",
+ "JSONField": "%(qn_column)s IS JSON",
+ "PositiveBigIntegerField": "%(qn_column)s >= 0",
+ "PositiveIntegerField": "%(qn_column)s >= 0",
+ "PositiveSmallIntegerField": "%(qn_column)s >= 0",
+ }
+
+ # Oracle doesn't support a database index on these columns.
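+ # For illustration of the data_types format strings above: a
+ # CharField(max_length=50) interpolates "NVARCHAR2(%(max_length)s)"
+ # against Field.__dict__ to yield NVARCHAR2(50).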
+ _limited_data_types = ("clob", "nclob", "blob") + + operators = _UninitializedOperatorsDescriptor() + + _standard_operators = { + "exact": "= %s", + "iexact": "= UPPER(%s)", + "contains": ( + "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + "icontains": ( + "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) " + "ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + "gt": "> %s", + "gte": ">= %s", + "lt": "< %s", + "lte": "<= %s", + "startswith": ( + "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + "endswith": ( + "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + "istartswith": ( + "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) " + "ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + "iendswith": ( + "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) " + "ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + ), + } + + _likec_operators = { + **_standard_operators, + "contains": "LIKEC %s ESCAPE '\\'", + "icontains": "LIKEC UPPER(%s) ESCAPE '\\'", + "startswith": "LIKEC %s ESCAPE '\\'", + "endswith": "LIKEC %s ESCAPE '\\'", + "istartswith": "LIKEC UPPER(%s) ESCAPE '\\'", + "iendswith": "LIKEC UPPER(%s) ESCAPE '\\'", + } + + # The patterns below are used to generate SQL pattern lookup clauses when + # the right-hand side of the lookup isn't a raw string (it might be an expression + # or the result of a bilateral transformation). + # In those cases, special characters for LIKE operators (e.g. \, %, _) + # should be escaped on the database side. + # + # Note: we use str.format() here for readability as '%' is used as a wildcard for + # the LIKE operator. + pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')" + _pattern_ops = { + "contains": "'%%' || {} || '%%'", + "icontains": "'%%' || UPPER({}) || '%%'", + "startswith": "{} || '%%'", + "istartswith": "UPPER({}) || '%%'", + "endswith": "'%%' || {}", + "iendswith": "'%%' || UPPER({})", + } + + _standard_pattern_ops = { + k: "LIKE TRANSLATE( " + v + " USING NCHAR_CS)" + " ESCAPE TRANSLATE('\\' USING NCHAR_CS)" + for k, v in _pattern_ops.items() + } + _likec_pattern_ops = { + k: "LIKEC " + v + " ESCAPE '\\'" for k, v in _pattern_ops.items() + } + + Database = Database + SchemaEditorClass = DatabaseSchemaEditor + # Classes instantiated in __init__(). + client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + validation_class = DatabaseValidation + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + use_returning_into = self.settings_dict["OPTIONS"].get( + "use_returning_into", True + ) + self.features.can_return_columns_from_insert = use_returning_into + + def get_database_version(self): + return self.oracle_version + + def get_connection_params(self): + conn_params = self.settings_dict["OPTIONS"].copy() + if "use_returning_into" in conn_params: + del conn_params["use_returning_into"] + return conn_params + + @async_unsafe + def get_new_connection(self, conn_params): + return Database.connect( + user=self.settings_dict["USER"], + password=self.settings_dict["PASSWORD"], + dsn=dsn(self.settings_dict), + **conn_params, + ) + + def init_connection_state(self): + super().init_connection_state() + cursor = self.create_cursor() + # Set the territory first. The territory overrides NLS_DATE_FORMAT + # and NLS_TIMESTAMP_FORMAT to the territory default. 
When all of + # these are set in single statement it isn't clear what is supposed + # to happen. + cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") + # Set Oracle date to ANSI date format. This only needs to execute + # once when we create a new connection. We also set the Territory + # to 'AMERICA' which forces Sunday to evaluate to a '1' in + # TO_CHAR(). + cursor.execute( + "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" + " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" + + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else "") + ) + cursor.close() + if "operators" not in self.__dict__: + # Ticket #14149: Check whether our LIKE implementation will + # work for this connection or we need to fall back on LIKEC. + # This check is performed only once per DatabaseWrapper + # instance per thread, since subsequent connections will use + # the same settings. + cursor = self.create_cursor() + try: + cursor.execute( + "SELECT 1 FROM DUAL WHERE DUMMY %s" + % self._standard_operators["contains"], + ["X"], + ) + except Database.DatabaseError: + self.operators = self._likec_operators + self.pattern_ops = self._likec_pattern_ops + else: + self.operators = self._standard_operators + self.pattern_ops = self._standard_pattern_ops + cursor.close() + self.connection.stmtcachesize = 20 + # Ensure all changes are preserved even when AUTOCOMMIT is False. + if not self.get_autocommit(): + self.commit() + + @async_unsafe + def create_cursor(self, name=None): + return FormatStylePlaceholderCursor(self.connection) + + def _commit(self): + if self.connection is not None: + with debug_transaction(self, "COMMIT"), wrap_oracle_errors(): + return self.connection.commit() + + # Oracle doesn't support releasing savepoints. But we fake them when query + # logging is enabled to keep query counts consistent with other backends. + def _savepoint_commit(self, sid): + if self.queries_logged: + self.queries_log.append( + { + "sql": "-- RELEASE SAVEPOINT %s (faked)" % self.ops.quote_name(sid), + "time": "0.000", + } + ) + + def _set_autocommit(self, autocommit): + with self.wrap_database_errors: + self.connection.autocommit = autocommit + + def check_constraints(self, table_names=None): + """ + Check constraints by setting them to immediate. Return them to deferred + afterward. + """ + with self.cursor() as cursor: + cursor.execute("SET CONSTRAINTS ALL IMMEDIATE") + cursor.execute("SET CONSTRAINTS ALL DEFERRED") + + def is_usable(self): + try: + self.connection.ping() + except Database.Error: + return False + else: + return True + + @cached_property + def cx_oracle_version(self): + return tuple(int(x) for x in Database.version.split(".")) + + @cached_property + def oracle_version(self): + with self.temporary_connection(): + return tuple(int(x) for x in self.connection.version.split(".")) + + +class OracleParam: + """ + Wrapper object for formatting parameters for Oracle. If the string + representation of the value is large enough (greater than 4000 characters) + the input size needs to be set as CLOB. Alternatively, if the parameter + has an `input_size` attribute, then the value of the `input_size` attribute + will be used instead. Otherwise, no input size will be set for the + parameter when executing the query. + """ + + def __init__(self, param, cursor, strings_only=False): + # With raw SQL queries, datetimes can reach this function + # without being converted by DateTimeField.get_db_prep_value. 
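+ # (Oracle_datetime subclasses datetime with input_size set to
+ # Database.TIMESTAMP, honored below, so microseconds survive binding.)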
+ if settings.USE_TZ and ( + isinstance(param, datetime.datetime) + and not isinstance(param, Oracle_datetime) + ): + param = Oracle_datetime.from_datetime(param) + + string_size = 0 + # Oracle doesn't recognize True and False correctly. + if param is True: + param = 1 + elif param is False: + param = 0 + if hasattr(param, "bind_parameter"): + self.force_bytes = param.bind_parameter(cursor) + elif isinstance(param, (Database.Binary, datetime.timedelta)): + self.force_bytes = param + else: + # To transmit to the database, we need Unicode if supported + # To get size right, we must consider bytes. + self.force_bytes = force_str(param, cursor.charset, strings_only) + if isinstance(self.force_bytes, str): + # We could optimize by only converting up to 4000 bytes here + string_size = len(force_bytes(param, cursor.charset, strings_only)) + if hasattr(param, "input_size"): + # If parameter has `input_size` attribute, use that. + self.input_size = param.input_size + elif string_size > 4000: + # Mark any string param greater than 4000 characters as a CLOB. + self.input_size = Database.CLOB + elif isinstance(param, datetime.datetime): + self.input_size = Database.TIMESTAMP + else: + self.input_size = None + + +class VariableWrapper: + """ + An adapter class for cursor variables that prevents the wrapped object + from being converted into a string when used to instantiate an OracleParam. + This can be used generally for any other object that should be passed into + Cursor.execute as-is. + """ + + def __init__(self, var): + self.var = var + + def bind_parameter(self, cursor): + return self.var + + def __getattr__(self, key): + return getattr(self.var, key) + + def __setattr__(self, key, value): + if key == "var": + self.__dict__[key] = value + else: + setattr(self.var, key, value) + + +class FormatStylePlaceholderCursor: + """ + Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var" + style. This fixes it -- but note that if you want to use a literal "%s" in + a query, you'll need to use "%%s". + """ + + charset = "utf-8" + + def __init__(self, connection): + self.cursor = connection.cursor() + self.cursor.outputtypehandler = self._output_type_handler + + @staticmethod + def _output_number_converter(value): + return decimal.Decimal(value) if "." in value else int(value) + + @staticmethod + def _get_decimal_converter(precision, scale): + if scale == 0: + return int + context = decimal.Context(prec=precision) + quantize_value = decimal.Decimal(1).scaleb(-scale) + return lambda v: decimal.Decimal(v).quantize(quantize_value, context=context) + + @staticmethod + def _output_type_handler(cursor, name, defaultType, length, precision, scale): + """ + Called for each db column fetched from cursors. Return numbers as the + appropriate Python type. + """ + if defaultType == Database.NUMBER: + if scale == -127: + if precision == 0: + # NUMBER column: decimal-precision floating point. + # This will normally be an integer from a sequence, + # but it could be a decimal value. + outconverter = FormatStylePlaceholderCursor._output_number_converter + else: + # FLOAT column: binary-precision floating point. + # This comes from FloatField columns. + outconverter = float + elif precision > 0: + # NUMBER(p,s) column: decimal-precision fixed point. + # This comes from IntegerField and DecimalField columns. + outconverter = FormatStylePlaceholderCursor._get_decimal_converter( + precision, scale + ) + else: + # No type information. This normally comes from a + # mathematical expression in the SELECT list. 
Guess int
+ # or Decimal based on whether it has a decimal point.
+ outconverter = FormatStylePlaceholderCursor._output_number_converter
+ return cursor.var(
+ Database.STRING,
+ size=255,
+ arraysize=cursor.arraysize,
+ outconverter=outconverter,
+ )
+
+ def _format_params(self, params):
+ try:
+ return {k: OracleParam(v, self, True) for k, v in params.items()}
+ except AttributeError:
+ return tuple(OracleParam(p, self, True) for p in params)
+
+ def _guess_input_sizes(self, params_list):
+ # Try dict handling; if that fails, treat as sequence
+ if hasattr(params_list[0], "keys"):
+ sizes = {}
+ for params in params_list:
+ for k, value in params.items():
+ if value.input_size:
+ sizes[k] = value.input_size
+ if sizes:
+ self.setinputsizes(**sizes)
+ else:
+ # It's not a list of dicts; it's a list of sequences
+ sizes = [None] * len(params_list[0])
+ for params in params_list:
+ for i, value in enumerate(params):
+ if value.input_size:
+ sizes[i] = value.input_size
+ if sizes:
+ self.setinputsizes(*sizes)
+
+ def _param_generator(self, params):
+ # Try dict handling; if that fails, treat as sequence
+ if hasattr(params, "items"):
+ return {k: v.force_bytes for k, v in params.items()}
+ else:
+ return [p.force_bytes for p in params]
+
+ def _fix_for_params(self, query, params, unify_by_values=False):
+ # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it
+ # does want a trailing ';' but not a trailing '/'. However, these
+ # characters must be included in the original query in case the query
+ # is being passed to SQL*Plus.
+ if query.endswith(";") or query.endswith("/"):
+ query = query[:-1]
+ if params is None:
+ params = []
+ elif hasattr(params, "keys"):
+ # Handle params as dict
+ args = {k: ":%s" % k for k in params}
+ query %= args
+ elif unify_by_values and params:
+ # Handle params as a dict with query parameters unified by their
+ # values. It can be used only in a single-query execute() because
+ # executemany() shares the formatted query with each params
+ # list. e.g.
for input params = [0.75, 2, 0.75, 'sth', 0.75] + # params_dict = {0.75: ':arg0', 2: ':arg1', 'sth': ':arg2'} + # args = [':arg0', ':arg1', ':arg0', ':arg2', ':arg0'] + # params = {':arg0': 0.75, ':arg1': 2, ':arg2': 'sth'} + params_dict = { + param: ":arg%d" % i for i, param in enumerate(dict.fromkeys(params)) + } + args = [params_dict[param] for param in params] + params = {value: key for key, value in params_dict.items()} + query %= tuple(args) + else: + # Handle params as sequence + args = [(":arg%d" % i) for i in range(len(params))] + query %= tuple(args) + return query, self._format_params(params) + + def execute(self, query, params=None): + query, params = self._fix_for_params(query, params, unify_by_values=True) + self._guess_input_sizes([params]) + with wrap_oracle_errors(): + return self.cursor.execute(query, self._param_generator(params)) + + def executemany(self, query, params=None): + if not params: + # No params given, nothing to do + return None + # uniform treatment for sequences and iterables + params_iter = iter(params) + query, firstparams = self._fix_for_params(query, next(params_iter)) + # we build a list of formatted params; as we're going to traverse it + # more than once, we can't make it lazy by using a generator + formatted = [firstparams] + [self._format_params(p) for p in params_iter] + self._guess_input_sizes(formatted) + with wrap_oracle_errors(): + return self.cursor.executemany( + query, [self._param_generator(p) for p in formatted] + ) + + def close(self): + try: + self.cursor.close() + except Database.InterfaceError: + # already closed + pass + + def var(self, *args): + return VariableWrapper(self.cursor.var(*args)) + + def arrayvar(self, *args): + return VariableWrapper(self.cursor.arrayvar(*args)) + + def __getattr__(self, attr): + return getattr(self.cursor, attr) + + def __iter__(self): + return iter(self.cursor) diff --git a/testbed/django__django/django/db/backends/oracle/client.py b/testbed/django__django/django/db/backends/oracle/client.py new file mode 100644 index 0000000000000000000000000000000000000000..365b1160464684c23491ff8ceb5cec7c177ddba2 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/client.py @@ -0,0 +1,27 @@ +import shutil + +from django.db.backends.base.client import BaseDatabaseClient + + +class DatabaseClient(BaseDatabaseClient): + executable_name = "sqlplus" + wrapper_name = "rlwrap" + + @staticmethod + def connect_string(settings_dict): + from django.db.backends.oracle.utils import dsn + + return '%s/"%s"@%s' % ( + settings_dict["USER"], + settings_dict["PASSWORD"], + dsn(settings_dict), + ) + + @classmethod + def settings_to_cmd_args_env(cls, settings_dict, parameters): + args = [cls.executable_name, "-L", cls.connect_string(settings_dict)] + wrapper_path = shutil.which(cls.wrapper_name) + if wrapper_path: + args = [wrapper_path, *args] + args.extend(parameters) + return args, None diff --git a/testbed/django__django/django/db/backends/oracle/creation.py b/testbed/django__django/django/db/backends/oracle/creation.py new file mode 100644 index 0000000000000000000000000000000000000000..df773bff8c20cbc648fab998f56a415faabcdbd7 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/creation.py @@ -0,0 +1,464 @@ +import sys + +from django.conf import settings +from django.db import DatabaseError +from django.db.backends.base.creation import BaseDatabaseCreation +from django.utils.crypto import get_random_string +from django.utils.functional import cached_property + +TEST_DATABASE_PREFIX = "test_" + + 
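+# For context, a sketch of the optional TEST settings this module reads
+# via _test_settings_get() below (names here are hypothetical):
+#   DATABASES["default"]["TEST"] = {
+#       "USER": "test_user",
+#       "TBLSPACE": "test_tbls",
+#       "CREATE_DB": True,
+#       "CREATE_USER": True,
+#   }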
+class DatabaseCreation(BaseDatabaseCreation): + @cached_property + def _maindb_connection(self): + """ + This is analogous to other backends' `_nodb_connection` property, + which allows access to an "administrative" connection which can + be used to manage the test databases. + For Oracle, the only connection that can be used for that purpose + is the main (non-test) connection. + """ + settings_dict = settings.DATABASES[self.connection.alias] + user = settings_dict.get("SAVED_USER") or settings_dict["USER"] + password = settings_dict.get("SAVED_PASSWORD") or settings_dict["PASSWORD"] + settings_dict = {**settings_dict, "USER": user, "PASSWORD": password} + DatabaseWrapper = type(self.connection) + return DatabaseWrapper(settings_dict, alias=self.connection.alias) + + def _create_test_db(self, verbosity=1, autoclobber=False, keepdb=False): + parameters = self._get_test_db_params() + with self._maindb_connection.cursor() as cursor: + if self._test_database_create(): + try: + self._execute_test_db_creation( + cursor, parameters, verbosity, keepdb + ) + except Exception as e: + if "ORA-01543" not in str(e): + # All errors except "tablespace already exists" cancel tests + self.log("Got an error creating the test database: %s" % e) + sys.exit(2) + if not autoclobber: + confirm = input( + "It appears the test database, %s, already exists. " + "Type 'yes' to delete it, or 'no' to cancel: " + % parameters["user"] + ) + if autoclobber or confirm == "yes": + if verbosity >= 1: + self.log( + "Destroying old test database for alias '%s'..." + % self.connection.alias + ) + try: + self._execute_test_db_destruction( + cursor, parameters, verbosity + ) + except DatabaseError as e: + if "ORA-29857" in str(e): + self._handle_objects_preventing_db_destruction( + cursor, parameters, verbosity, autoclobber + ) + else: + # Ran into a database error that isn't about + # leftover objects in the tablespace. + self.log( + "Got an error destroying the old test database: %s" + % e + ) + sys.exit(2) + except Exception as e: + self.log( + "Got an error destroying the old test database: %s" % e + ) + sys.exit(2) + try: + self._execute_test_db_creation( + cursor, parameters, verbosity, keepdb + ) + except Exception as e: + self.log( + "Got an error recreating the test database: %s" % e + ) + sys.exit(2) + else: + self.log("Tests cancelled.") + sys.exit(1) + + if self._test_user_create(): + if verbosity >= 1: + self.log("Creating test user...") + try: + self._create_test_user(cursor, parameters, verbosity, keepdb) + except Exception as e: + if "ORA-01920" not in str(e): + # All errors except "user already exists" cancel tests + self.log("Got an error creating the test user: %s" % e) + sys.exit(2) + if not autoclobber: + confirm = input( + "It appears the test user, %s, already exists. Type " + "'yes' to delete it, or 'no' to cancel: " + % parameters["user"] + ) + if autoclobber or confirm == "yes": + try: + if verbosity >= 1: + self.log("Destroying old test user...") + self._destroy_test_user(cursor, parameters, verbosity) + if verbosity >= 1: + self.log("Creating test user...") + self._create_test_user( + cursor, parameters, verbosity, keepdb + ) + except Exception as e: + self.log("Got an error recreating the test user: %s" % e) + sys.exit(2) + else: + self.log("Tests cancelled.") + sys.exit(1) + # Done with main user -- test user and tablespaces created. 
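+ # From here on, connections are made as the test user;
+ # _switch_to_test_user() below stashes the original credentials in
+ # SAVED_USER/SAVED_PASSWORD so _destroy_test_db() can restore them.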
+ self._maindb_connection.close()
+ self._switch_to_test_user(parameters)
+ return self.connection.settings_dict["NAME"]
+
+ def _switch_to_test_user(self, parameters):
+ """
+ Switch to the user that's used for creating the test database.
+
+ Oracle doesn't have the concept of separate databases under the same
+ user, so a separate user is used; see _create_test_db(). The main user
+ is also needed for cleanup when testing is completed, so save its
+ credentials in the SAVED_USER/SAVED_PASSWORD key in the settings dict.
+ """
+ real_settings = settings.DATABASES[self.connection.alias]
+ real_settings["SAVED_USER"] = self.connection.settings_dict[
+ "SAVED_USER"
+ ] = self.connection.settings_dict["USER"]
+ real_settings["SAVED_PASSWORD"] = self.connection.settings_dict[
+ "SAVED_PASSWORD"
+ ] = self.connection.settings_dict["PASSWORD"]
+ real_test_settings = real_settings["TEST"]
+ test_settings = self.connection.settings_dict["TEST"]
+ real_test_settings["USER"] = real_settings["USER"] = test_settings[
+ "USER"
+ ] = self.connection.settings_dict["USER"] = parameters["user"]
+ real_settings["PASSWORD"] = self.connection.settings_dict[
+ "PASSWORD"
+ ] = parameters["password"]
+
+ def set_as_test_mirror(self, primary_settings_dict):
+ """
+ Set this database up to be used in testing as a mirror of a primary
+ database whose settings are given.
+ """
+ self.connection.settings_dict["USER"] = primary_settings_dict["USER"]
+ self.connection.settings_dict["PASSWORD"] = primary_settings_dict["PASSWORD"]
+
+ def _handle_objects_preventing_db_destruction(
+ self, cursor, parameters, verbosity, autoclobber
+ ):
+ # There are objects in the test tablespace which prevent dropping it.
+ # The easy fix is to drop the test user -- but are we allowed to do so?
+ self.log(
+ "There are objects in the old test database which prevent its destruction."
+ "\nIf they belong to the test user, deleting the user will allow the test "
+ "database to be recreated.\n"
+ "Otherwise, you will need to find and remove each of these objects, "
+ "or use a different tablespace.\n"
+ )
+ if self._test_user_create():
+ if not autoclobber:
+ confirm = input("Type 'yes' to delete user %s: " % parameters["user"])
+ if autoclobber or confirm == "yes":
+ try:
+ if verbosity >= 1:
+ self.log("Destroying old test user...")
+ self._destroy_test_user(cursor, parameters, verbosity)
+ except Exception as e:
+ self.log("Got an error destroying the test user: %s" % e)
+ sys.exit(2)
+ try:
+ if verbosity >= 1:
+ self.log(
+ "Destroying old test database for alias '%s'..."
+ % self.connection.alias
+ )
+ self._execute_test_db_destruction(cursor, parameters, verbosity)
+ except Exception as e:
+ self.log("Got an error destroying the test database: %s" % e)
+ sys.exit(2)
+ else:
+ self.log("Tests cancelled -- test database cannot be recreated.")
+ sys.exit(1)
+ else:
+ self.log(
+ "Django is configured to use pre-existing test user '%s',"
+ " and will not attempt to delete it." % parameters["user"]
+ )
+ self.log("Tests cancelled -- test database cannot be recreated.")
+ sys.exit(1)
+
+ def _destroy_test_db(self, test_database_name, verbosity=1):
+ """
+ Destroy the test user and the test tablespaces, restoring the main
+ user's saved credentials first.
+ """ + self.connection.settings_dict["USER"] = self.connection.settings_dict[ + "SAVED_USER" + ] + self.connection.settings_dict["PASSWORD"] = self.connection.settings_dict[ + "SAVED_PASSWORD" + ] + self.connection.close() + parameters = self._get_test_db_params() + with self._maindb_connection.cursor() as cursor: + if self._test_user_create(): + if verbosity >= 1: + self.log("Destroying test user...") + self._destroy_test_user(cursor, parameters, verbosity) + if self._test_database_create(): + if verbosity >= 1: + self.log("Destroying test database tables...") + self._execute_test_db_destruction(cursor, parameters, verbosity) + self._maindb_connection.close() + + def _execute_test_db_creation(self, cursor, parameters, verbosity, keepdb=False): + if verbosity >= 2: + self.log("_create_test_db(): dbname = %s" % parameters["user"]) + if self._test_database_oracle_managed_files(): + statements = [ + """ + CREATE TABLESPACE %(tblspace)s + DATAFILE SIZE %(size)s + AUTOEXTEND ON NEXT %(extsize)s MAXSIZE %(maxsize)s + """, + """ + CREATE TEMPORARY TABLESPACE %(tblspace_temp)s + TEMPFILE SIZE %(size_tmp)s + AUTOEXTEND ON NEXT %(extsize_tmp)s MAXSIZE %(maxsize_tmp)s + """, + ] + else: + statements = [ + """ + CREATE TABLESPACE %(tblspace)s + DATAFILE '%(datafile)s' SIZE %(size)s REUSE + AUTOEXTEND ON NEXT %(extsize)s MAXSIZE %(maxsize)s + """, + """ + CREATE TEMPORARY TABLESPACE %(tblspace_temp)s + TEMPFILE '%(datafile_tmp)s' SIZE %(size_tmp)s REUSE + AUTOEXTEND ON NEXT %(extsize_tmp)s MAXSIZE %(maxsize_tmp)s + """, + ] + # Ignore "tablespace already exists" error when keepdb is on. + acceptable_ora_err = "ORA-01543" if keepdb else None + self._execute_allow_fail_statements( + cursor, statements, parameters, verbosity, acceptable_ora_err + ) + + def _create_test_user(self, cursor, parameters, verbosity, keepdb=False): + if verbosity >= 2: + self.log("_create_test_user(): username = %s" % parameters["user"]) + statements = [ + """CREATE USER %(user)s + IDENTIFIED BY "%(password)s" + DEFAULT TABLESPACE %(tblspace)s + TEMPORARY TABLESPACE %(tblspace_temp)s + QUOTA UNLIMITED ON %(tblspace)s + """, + """GRANT CREATE SESSION, + CREATE TABLE, + CREATE SEQUENCE, + CREATE PROCEDURE, + CREATE TRIGGER + TO %(user)s""", + ] + # Ignore "user already exists" error when keepdb is on + acceptable_ora_err = "ORA-01920" if keepdb else None + success = self._execute_allow_fail_statements( + cursor, statements, parameters, verbosity, acceptable_ora_err + ) + # If the password was randomly generated, change the user accordingly. + if not success and self._test_settings_get("PASSWORD") is None: + set_password = 'ALTER USER %(user)s IDENTIFIED BY "%(password)s"' + self._execute_statements(cursor, [set_password], parameters, verbosity) + # Most test suites can be run without "create view" and + # "create materialized view" privileges. But some need it. + for object_type in ("VIEW", "MATERIALIZED VIEW"): + extra = "GRANT CREATE %(object_type)s TO %(user)s" + parameters["object_type"] = object_type + success = self._execute_allow_fail_statements( + cursor, [extra], parameters, verbosity, "ORA-01031" + ) + if not success and verbosity >= 2: + self.log( + "Failed to grant CREATE %s permission to test user. This may be ok." 
+ % object_type + ) + + def _execute_test_db_destruction(self, cursor, parameters, verbosity): + if verbosity >= 2: + self.log("_execute_test_db_destruction(): dbname=%s" % parameters["user"]) + statements = [ + "DROP TABLESPACE %(tblspace)s " + "INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS", + "DROP TABLESPACE %(tblspace_temp)s " + "INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS", + ] + self._execute_statements(cursor, statements, parameters, verbosity) + + def _destroy_test_user(self, cursor, parameters, verbosity): + if verbosity >= 2: + self.log("_destroy_test_user(): user=%s" % parameters["user"]) + self.log("Be patient. This can take some time...") + statements = [ + "DROP USER %(user)s CASCADE", + ] + self._execute_statements(cursor, statements, parameters, verbosity) + + def _execute_statements( + self, cursor, statements, parameters, verbosity, allow_quiet_fail=False + ): + for template in statements: + stmt = template % parameters + if verbosity >= 2: + print(stmt) + try: + cursor.execute(stmt) + except Exception as err: + if (not allow_quiet_fail) or verbosity >= 2: + self.log("Failed (%s)" % (err)) + raise + + def _execute_allow_fail_statements( + self, cursor, statements, parameters, verbosity, acceptable_ora_err + ): + """ + Execute statements which are allowed to fail silently if the Oracle + error code given by `acceptable_ora_err` is raised. Return True if the + statements execute without an exception, or False otherwise. + """ + try: + # Statement can fail when acceptable_ora_err is not None + allow_quiet_fail = ( + acceptable_ora_err is not None and len(acceptable_ora_err) > 0 + ) + self._execute_statements( + cursor, + statements, + parameters, + verbosity, + allow_quiet_fail=allow_quiet_fail, + ) + return True + except DatabaseError as err: + description = str(err) + if acceptable_ora_err is None or acceptable_ora_err not in description: + raise + return False + + def _get_test_db_params(self): + return { + "dbname": self._test_database_name(), + "user": self._test_database_user(), + "password": self._test_database_passwd(), + "tblspace": self._test_database_tblspace(), + "tblspace_temp": self._test_database_tblspace_tmp(), + "datafile": self._test_database_tblspace_datafile(), + "datafile_tmp": self._test_database_tblspace_tmp_datafile(), + "maxsize": self._test_database_tblspace_maxsize(), + "maxsize_tmp": self._test_database_tblspace_tmp_maxsize(), + "size": self._test_database_tblspace_size(), + "size_tmp": self._test_database_tblspace_tmp_size(), + "extsize": self._test_database_tblspace_extsize(), + "extsize_tmp": self._test_database_tblspace_tmp_extsize(), + } + + def _test_settings_get(self, key, default=None, prefixed=None): + """ + Return a value from the test settings dict, or a given default, or a + prefixed entry from the main settings dict. 
+ """ + settings_dict = self.connection.settings_dict + val = settings_dict["TEST"].get(key, default) + if val is None and prefixed: + val = TEST_DATABASE_PREFIX + settings_dict[prefixed] + return val + + def _test_database_name(self): + return self._test_settings_get("NAME", prefixed="NAME") + + def _test_database_create(self): + return self._test_settings_get("CREATE_DB", default=True) + + def _test_user_create(self): + return self._test_settings_get("CREATE_USER", default=True) + + def _test_database_user(self): + return self._test_settings_get("USER", prefixed="USER") + + def _test_database_passwd(self): + password = self._test_settings_get("PASSWORD") + if password is None and self._test_user_create(): + # Oracle passwords are limited to 30 chars and can't contain symbols. + password = get_random_string(30) + return password + + def _test_database_tblspace(self): + return self._test_settings_get("TBLSPACE", prefixed="USER") + + def _test_database_tblspace_tmp(self): + settings_dict = self.connection.settings_dict + return settings_dict["TEST"].get( + "TBLSPACE_TMP", TEST_DATABASE_PREFIX + settings_dict["USER"] + "_temp" + ) + + def _test_database_tblspace_datafile(self): + tblspace = "%s.dbf" % self._test_database_tblspace() + return self._test_settings_get("DATAFILE", default=tblspace) + + def _test_database_tblspace_tmp_datafile(self): + tblspace = "%s.dbf" % self._test_database_tblspace_tmp() + return self._test_settings_get("DATAFILE_TMP", default=tblspace) + + def _test_database_tblspace_maxsize(self): + return self._test_settings_get("DATAFILE_MAXSIZE", default="500M") + + def _test_database_tblspace_tmp_maxsize(self): + return self._test_settings_get("DATAFILE_TMP_MAXSIZE", default="500M") + + def _test_database_tblspace_size(self): + return self._test_settings_get("DATAFILE_SIZE", default="50M") + + def _test_database_tblspace_tmp_size(self): + return self._test_settings_get("DATAFILE_TMP_SIZE", default="50M") + + def _test_database_tblspace_extsize(self): + return self._test_settings_get("DATAFILE_EXTSIZE", default="25M") + + def _test_database_tblspace_tmp_extsize(self): + return self._test_settings_get("DATAFILE_TMP_EXTSIZE", default="25M") + + def _test_database_oracle_managed_files(self): + return self._test_settings_get("ORACLE_MANAGED_FILES", default=False) + + def _get_test_db_name(self): + """ + Return the 'production' DB name to get the test DB creation machinery + to work. This isn't a great deal in this case because DB names as + handled by Django don't have real counterparts in Oracle. 
+ """ + return self.connection.settings_dict["NAME"] + + def test_db_signature(self): + settings_dict = self.connection.settings_dict + return ( + settings_dict["HOST"], + settings_dict["PORT"], + settings_dict["ENGINE"], + settings_dict["NAME"], + self._test_database_user(), + ) diff --git a/testbed/django__django/django/db/backends/oracle/features.py b/testbed/django__django/django/db/backends/oracle/features.py new file mode 100644 index 0000000000000000000000000000000000000000..2ef9e4300c926534191e19678e4bf4151c5ff055 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/features.py @@ -0,0 +1,159 @@ +from django.db import DatabaseError, InterfaceError +from django.db.backends.base.features import BaseDatabaseFeatures +from django.utils.functional import cached_property + + +class DatabaseFeatures(BaseDatabaseFeatures): + minimum_database_version = (19,) + # Oracle crashes with "ORA-00932: inconsistent datatypes: expected - got + # BLOB" when grouping by LOBs (#24096). + allows_group_by_lob = False + allows_group_by_select_index = False + interprets_empty_strings_as_nulls = True + has_select_for_update = True + has_select_for_update_nowait = True + has_select_for_update_skip_locked = True + has_select_for_update_of = True + select_for_update_of_column = True + can_return_columns_from_insert = True + supports_subqueries_in_group_by = False + ignores_unnecessary_order_by_in_subqueries = False + supports_transactions = True + supports_timezones = False + has_native_duration_field = True + can_defer_constraint_checks = True + supports_partially_nullable_unique_constraints = False + supports_deferrable_unique_constraints = True + truncates_names = True + supports_comments = True + supports_tablespaces = True + supports_sequence_reset = False + can_introspect_materialized_views = True + atomic_transactions = False + nulls_order_largest = True + requires_literal_defaults = True + supports_default_keyword_in_bulk_insert = False + closed_cursor_error_class = InterfaceError + bare_select_suffix = " FROM DUAL" + # Select for update with limit can be achieved on Oracle, but not with the + # current backend. + supports_select_for_update_with_limit = False + supports_temporal_subtraction = True + # Oracle doesn't ignore quoted identifiers case but the current backend + # does by uppercasing all identifiers. 
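+ # (so "name" and "NAME" resolve to the same identifier as far as this
+ # backend is concerned.)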
+ ignores_table_name_case = True + supports_index_on_text_field = False + create_test_procedure_without_params_sql = """ + CREATE PROCEDURE "TEST_PROCEDURE" AS + V_I INTEGER; + BEGIN + V_I := 1; + END; + """ + create_test_procedure_with_int_param_sql = """ + CREATE PROCEDURE "TEST_PROCEDURE" (P_I INTEGER) AS + V_I INTEGER; + BEGIN + V_I := P_I; + END; + """ + create_test_table_with_composite_primary_key = """ + CREATE TABLE test_table_composite_pk ( + column_1 NUMBER(11) NOT NULL, + column_2 NUMBER(11) NOT NULL, + PRIMARY KEY (column_1, column_2) + ) + """ + supports_callproc_kwargs = True + supports_over_clause = True + supports_frame_range_fixed_distance = True + supports_ignore_conflicts = False + max_query_params = 2**16 - 1 + supports_partial_indexes = False + can_rename_index = True + supports_slicing_ordering_in_compound = True + requires_compound_order_by_subquery = True + allows_multiple_constraints_on_same_fields = False + supports_boolean_expr_in_select_clause = False + supports_comparing_boolean_expr = False + supports_primitives_in_json_field = False + supports_json_field_contains = False + supports_collation_on_textfield = False + test_collations = { + "ci": "BINARY_CI", + "cs": "BINARY", + "non_default": "SWEDISH_CI", + "swedish_ci": "SWEDISH_CI", + } + test_now_utc_template = "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'" + + django_test_skips = { + "Oracle doesn't support SHA224.": { + "db_functions.text.test_sha224.SHA224Tests.test_basic", + "db_functions.text.test_sha224.SHA224Tests.test_transform", + }, + "Oracle doesn't correctly calculate ISO 8601 week numbering before " + "1583 (the Gregorian calendar was introduced in 1582).": { + "db_functions.datetime.test_extract_trunc.DateFunctionTests." + "test_trunc_week_before_1000", + "db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests." + "test_trunc_week_before_1000", + }, + "Oracle extracts seconds including fractional seconds (#33517).": { + "db_functions.datetime.test_extract_trunc.DateFunctionTests." + "test_extract_second_func_no_fractional", + "db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests." + "test_extract_second_func_no_fractional", + }, + "Oracle doesn't support bitwise XOR.": { + "expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor", + "expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor_null", + "expressions.tests.ExpressionOperatorTests." + "test_lefthand_bitwise_xor_right_null", + }, + "Oracle requires ORDER BY in row_number, ANSI:SQL doesn't.": { + "expressions_window.tests.WindowFunctionTests.test_row_number_no_ordering", + }, + "Raises ORA-00600: internal error code.": { + "model_fields.test_jsonfield.TestQuerying.test_usage_in_subquery", + }, + "Oracle doesn't support changing collations on indexed columns (#33671).": { + "migrations.test_operations.OperationTests." + "test_alter_field_pk_fk_db_collation", + }, + "Oracle doesn't support comparing NCLOB to NUMBER.": { + "generic_relations_regress.tests.GenericRelationTests.test_textlink_filter", + }, + } + django_test_expected_failures = { + # A bug in Django/cx_Oracle with respect to string handling (#23843). + "annotations.tests.NonAggregateAnnotationTestCase.test_custom_functions", + "annotations.tests.NonAggregateAnnotationTestCase." 
+ "test_custom_functions_can_ref_other_functions", + } + insert_test_table_with_defaults = ( + "INSERT INTO {} VALUES (DEFAULT, DEFAULT, DEFAULT)" + ) + + @cached_property + def introspected_field_types(self): + return { + **super().introspected_field_types, + "GenericIPAddressField": "CharField", + "PositiveBigIntegerField": "BigIntegerField", + "PositiveIntegerField": "IntegerField", + "PositiveSmallIntegerField": "IntegerField", + "SmallIntegerField": "IntegerField", + "TimeField": "DateTimeField", + } + + @cached_property + def supports_collation_on_charfield(self): + with self.connection.cursor() as cursor: + try: + cursor.execute("SELECT CAST('a' AS VARCHAR2(4001)) FROM dual") + except DatabaseError as e: + if e.args[0].code == 910: + return False + raise + return True diff --git a/testbed/django__django/django/db/backends/oracle/functions.py b/testbed/django__django/django/db/backends/oracle/functions.py new file mode 100644 index 0000000000000000000000000000000000000000..936cc9e73f19dbe9723702efeff4673dbb9c45fb --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/functions.py @@ -0,0 +1,26 @@ +from django.db.models import DecimalField, DurationField, Func + + +class IntervalToSeconds(Func): + function = "" + template = """ + EXTRACT(day from %(expressions)s) * 86400 + + EXTRACT(hour from %(expressions)s) * 3600 + + EXTRACT(minute from %(expressions)s) * 60 + + EXTRACT(second from %(expressions)s) + """ + + def __init__(self, expression, *, output_field=None, **extra): + super().__init__( + expression, output_field=output_field or DecimalField(), **extra + ) + + +class SecondsToInterval(Func): + function = "NUMTODSINTERVAL" + template = "%(function)s(%(expressions)s, 'SECOND')" + + def __init__(self, expression, *, output_field=None, **extra): + super().__init__( + expression, output_field=output_field or DurationField(), **extra + ) diff --git a/testbed/django__django/django/db/backends/oracle/introspection.py b/testbed/django__django/django/db/backends/oracle/introspection.py new file mode 100644 index 0000000000000000000000000000000000000000..2063b535ceb958b87c5fc6554236650596c72633 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/introspection.py @@ -0,0 +1,434 @@ +from collections import namedtuple + +import cx_Oracle + +from django.db import models +from django.db.backends.base.introspection import BaseDatabaseIntrospection +from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo +from django.utils.functional import cached_property + +FieldInfo = namedtuple( + "FieldInfo", BaseFieldInfo._fields + ("is_autofield", "is_json", "comment") +) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) + + +class DatabaseIntrospection(BaseDatabaseIntrospection): + cache_bust_counter = 1 + + # Maps type objects to Django Field types. 
+ @cached_property + def data_types_reverse(self): + if self.connection.cx_oracle_version < (8,): + return { + cx_Oracle.BLOB: "BinaryField", + cx_Oracle.CLOB: "TextField", + cx_Oracle.DATETIME: "DateField", + cx_Oracle.FIXED_CHAR: "CharField", + cx_Oracle.FIXED_NCHAR: "CharField", + cx_Oracle.INTERVAL: "DurationField", + cx_Oracle.NATIVE_FLOAT: "FloatField", + cx_Oracle.NCHAR: "CharField", + cx_Oracle.NCLOB: "TextField", + cx_Oracle.NUMBER: "DecimalField", + cx_Oracle.STRING: "CharField", + cx_Oracle.TIMESTAMP: "DateTimeField", + } + else: + return { + cx_Oracle.DB_TYPE_DATE: "DateField", + cx_Oracle.DB_TYPE_BINARY_DOUBLE: "FloatField", + cx_Oracle.DB_TYPE_BLOB: "BinaryField", + cx_Oracle.DB_TYPE_CHAR: "CharField", + cx_Oracle.DB_TYPE_CLOB: "TextField", + cx_Oracle.DB_TYPE_INTERVAL_DS: "DurationField", + cx_Oracle.DB_TYPE_NCHAR: "CharField", + cx_Oracle.DB_TYPE_NCLOB: "TextField", + cx_Oracle.DB_TYPE_NVARCHAR: "CharField", + cx_Oracle.DB_TYPE_NUMBER: "DecimalField", + cx_Oracle.DB_TYPE_TIMESTAMP: "DateTimeField", + cx_Oracle.DB_TYPE_VARCHAR: "CharField", + } + + def get_field_type(self, data_type, description): + if data_type == cx_Oracle.NUMBER: + precision, scale = description[4:6] + if scale == 0: + if precision > 11: + return ( + "BigAutoField" + if description.is_autofield + else "BigIntegerField" + ) + elif 1 < precision < 6 and description.is_autofield: + return "SmallAutoField" + elif precision == 1: + return "BooleanField" + elif description.is_autofield: + return "AutoField" + else: + return "IntegerField" + elif scale == -127: + return "FloatField" + elif data_type == cx_Oracle.NCLOB and description.is_json: + return "JSONField" + + return super().get_field_type(data_type, description) + + def get_table_list(self, cursor): + """Return a list of table and view names in the current database.""" + cursor.execute( + """ + SELECT + user_tables.table_name, + 't', + user_tab_comments.comments + FROM user_tables + LEFT OUTER JOIN + user_tab_comments + ON user_tab_comments.table_name = user_tables.table_name + WHERE + NOT EXISTS ( + SELECT 1 + FROM user_mviews + WHERE user_mviews.mview_name = user_tables.table_name + ) + UNION ALL + SELECT view_name, 'v', NULL FROM user_views + UNION ALL + SELECT mview_name, 'v', NULL FROM user_mviews + """ + ) + return [ + TableInfo(self.identifier_converter(row[0]), row[1], row[2]) + for row in cursor.fetchall() + ] + + def get_table_description(self, cursor, table_name): + """ + Return a description of the table with the DB-API cursor.description + interface. + """ + # A default collation for the given table/view/materialized view. 
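+        # Tables, views, and materialized views each expose a default
+        # collation, hence the three-way UNION below.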
+ cursor.execute( + """ + SELECT user_tables.default_collation + FROM user_tables + WHERE + user_tables.table_name = UPPER(%s) AND + NOT EXISTS ( + SELECT 1 + FROM user_mviews + WHERE user_mviews.mview_name = user_tables.table_name + ) + UNION ALL + SELECT user_views.default_collation + FROM user_views + WHERE user_views.view_name = UPPER(%s) + UNION ALL + SELECT user_mviews.default_collation + FROM user_mviews + WHERE user_mviews.mview_name = UPPER(%s) + """, + [table_name, table_name, table_name], + ) + row = cursor.fetchone() + default_table_collation = row[0] if row else "" + # user_tab_columns gives data default for columns + cursor.execute( + """ + SELECT + user_tab_cols.column_name, + user_tab_cols.data_default, + CASE + WHEN user_tab_cols.collation = %s + THEN NULL + ELSE user_tab_cols.collation + END collation, + CASE + WHEN user_tab_cols.char_used IS NULL + THEN user_tab_cols.data_length + ELSE user_tab_cols.char_length + END as display_size, + CASE + WHEN user_tab_cols.identity_column = 'YES' THEN 1 + ELSE 0 + END as is_autofield, + CASE + WHEN EXISTS ( + SELECT 1 + FROM user_json_columns + WHERE + user_json_columns.table_name = user_tab_cols.table_name AND + user_json_columns.column_name = user_tab_cols.column_name + ) + THEN 1 + ELSE 0 + END as is_json, + user_col_comments.comments as col_comment + FROM user_tab_cols + LEFT OUTER JOIN + user_col_comments ON + user_col_comments.column_name = user_tab_cols.column_name AND + user_col_comments.table_name = user_tab_cols.table_name + WHERE user_tab_cols.table_name = UPPER(%s) + """, + [default_table_collation, table_name], + ) + field_map = { + column: ( + display_size, + default.rstrip() if default and default != "NULL" else None, + collation, + is_autofield, + is_json, + comment, + ) + for ( + column, + default, + collation, + display_size, + is_autofield, + is_json, + comment, + ) in cursor.fetchall() + } + self.cache_bust_counter += 1 + cursor.execute( + "SELECT * FROM {} WHERE ROWNUM < 2 AND {} > 0".format( + self.connection.ops.quote_name(table_name), self.cache_bust_counter + ) + ) + description = [] + for desc in cursor.description: + name = desc[0] + ( + display_size, + default, + collation, + is_autofield, + is_json, + comment, + ) = field_map[name] + name %= {} # cx_Oracle, for some reason, doubles percent signs. + description.append( + FieldInfo( + self.identifier_converter(name), + desc[1], + display_size, + desc[3], + desc[4] or 0, + desc[5] or 0, + *desc[6:], + default, + collation, + is_autofield, + is_json, + comment, + ) + ) + return description + + def identifier_converter(self, name): + """Identifier comparison is case insensitive under Oracle.""" + return name.lower() + + def get_sequences(self, cursor, table_name, table_fields=()): + cursor.execute( + """ + SELECT + user_tab_identity_cols.sequence_name, + user_tab_identity_cols.column_name + FROM + user_tab_identity_cols, + user_constraints, + user_cons_columns cols + WHERE + user_constraints.constraint_name = cols.constraint_name + AND user_constraints.table_name = user_tab_identity_cols.table_name + AND cols.column_name = user_tab_identity_cols.column_name + AND user_constraints.constraint_type = 'P' + AND user_tab_identity_cols.table_name = UPPER(%s) + """, + [table_name], + ) + # Oracle allows only one identity column per table. 
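+        # A single fetchone() is therefore sufficient here.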
+ row = cursor.fetchone() + if row: + return [ + { + "name": self.identifier_converter(row[0]), + "table": self.identifier_converter(table_name), + "column": self.identifier_converter(row[1]), + } + ] + # To keep backward compatibility for AutoFields that aren't Oracle + # identity columns. + for f in table_fields: + if isinstance(f, models.AutoField): + return [{"table": table_name, "column": f.column}] + return [] + + def get_relations(self, cursor, table_name): + """ + Return a dictionary of {field_name: (field_name_other_table, other_table)} + representing all foreign keys in the given table. + """ + table_name = table_name.upper() + cursor.execute( + """ + SELECT ca.column_name, cb.table_name, cb.column_name + FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb + WHERE user_constraints.table_name = %s AND + user_constraints.constraint_name = ca.constraint_name AND + user_constraints.r_constraint_name = cb.constraint_name AND + ca.position = cb.position""", + [table_name], + ) + + return { + self.identifier_converter(field_name): ( + self.identifier_converter(rel_field_name), + self.identifier_converter(rel_table_name), + ) + for field_name, rel_table_name, rel_field_name in cursor.fetchall() + } + + def get_primary_key_columns(self, cursor, table_name): + cursor.execute( + """ + SELECT + cols.column_name + FROM + user_constraints, + user_cons_columns cols + WHERE + user_constraints.constraint_name = cols.constraint_name AND + user_constraints.constraint_type = 'P' AND + user_constraints.table_name = UPPER(%s) + ORDER BY + cols.position + """, + [table_name], + ) + return [self.identifier_converter(row[0]) for row in cursor.fetchall()] + + def get_constraints(self, cursor, table_name): + """ + Retrieve any constraints or keys (unique, pk, fk, check, index) across + one or more columns. 
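+        Constraints, foreign keys, and indexes are collected by the three
+        separate queries below.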
+ """ + constraints = {} + # Loop over the constraints, getting PKs, uniques, and checks + cursor.execute( + """ + SELECT + user_constraints.constraint_name, + LISTAGG(LOWER(cols.column_name), ',') + WITHIN GROUP (ORDER BY cols.position), + CASE user_constraints.constraint_type + WHEN 'P' THEN 1 + ELSE 0 + END AS is_primary_key, + CASE + WHEN user_constraints.constraint_type IN ('P', 'U') THEN 1 + ELSE 0 + END AS is_unique, + CASE user_constraints.constraint_type + WHEN 'C' THEN 1 + ELSE 0 + END AS is_check_constraint + FROM + user_constraints + LEFT OUTER JOIN + user_cons_columns cols + ON user_constraints.constraint_name = cols.constraint_name + WHERE + user_constraints.constraint_type = ANY('P', 'U', 'C') + AND user_constraints.table_name = UPPER(%s) + GROUP BY user_constraints.constraint_name, user_constraints.constraint_type + """, + [table_name], + ) + for constraint, columns, pk, unique, check in cursor.fetchall(): + constraint = self.identifier_converter(constraint) + constraints[constraint] = { + "columns": columns.split(","), + "primary_key": pk, + "unique": unique, + "foreign_key": None, + "check": check, + "index": unique, # All uniques come with an index + } + # Foreign key constraints + cursor.execute( + """ + SELECT + cons.constraint_name, + LISTAGG(LOWER(cols.column_name), ',') + WITHIN GROUP (ORDER BY cols.position), + LOWER(rcols.table_name), + LOWER(rcols.column_name) + FROM + user_constraints cons + INNER JOIN + user_cons_columns rcols + ON rcols.constraint_name = cons.r_constraint_name AND rcols.position = 1 + LEFT OUTER JOIN + user_cons_columns cols + ON cons.constraint_name = cols.constraint_name + WHERE + cons.constraint_type = 'R' AND + cons.table_name = UPPER(%s) + GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name + """, + [table_name], + ) + for constraint, columns, other_table, other_column in cursor.fetchall(): + constraint = self.identifier_converter(constraint) + constraints[constraint] = { + "primary_key": False, + "unique": False, + "foreign_key": (other_table, other_column), + "check": False, + "index": False, + "columns": columns.split(","), + } + # Now get indexes + cursor.execute( + """ + SELECT + ind.index_name, + LOWER(ind.index_type), + LOWER(ind.uniqueness), + LISTAGG(LOWER(cols.column_name), ',') + WITHIN GROUP (ORDER BY cols.column_position), + LISTAGG(cols.descend, ',') WITHIN GROUP (ORDER BY cols.column_position) + FROM + user_ind_columns cols, user_indexes ind + WHERE + cols.table_name = UPPER(%s) AND + NOT EXISTS ( + SELECT 1 + FROM user_constraints cons + WHERE ind.index_name = cons.index_name + ) AND cols.index_name = ind.index_name + GROUP BY ind.index_name, ind.index_type, ind.uniqueness + """, + [table_name], + ) + for constraint, type_, unique, columns, orders in cursor.fetchall(): + constraint = self.identifier_converter(constraint) + constraints[constraint] = { + "primary_key": False, + "unique": unique == "unique", + "foreign_key": None, + "check": False, + "index": True, + "type": "idx" if type_ == "normal" else type_, + "columns": columns.split(","), + "orders": orders.split(","), + } + return constraints diff --git a/testbed/django__django/django/db/backends/oracle/operations.py b/testbed/django__django/django/db/backends/oracle/operations.py new file mode 100644 index 0000000000000000000000000000000000000000..64b1f82071e6b1b6a2bdfb8935141c2c43254c8c --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/operations.py @@ -0,0 +1,722 @@ +import datetime +import uuid +from functools import lru_cache + 
+from django.conf import settings +from django.db import DatabaseError, NotSupportedError +from django.db.backends.base.operations import BaseDatabaseOperations +from django.db.backends.utils import split_tzname_delta, strip_quotes, truncate_name +from django.db.models import AutoField, Exists, ExpressionWrapper, Lookup +from django.db.models.expressions import RawSQL +from django.db.models.sql.where import WhereNode +from django.utils import timezone +from django.utils.encoding import force_bytes, force_str +from django.utils.functional import cached_property +from django.utils.regex_helper import _lazy_re_compile + +from .base import Database +from .utils import BulkInsertMapper, InsertVar, Oracle_datetime + + +class DatabaseOperations(BaseDatabaseOperations): + # Oracle uses NUMBER(5), NUMBER(11), and NUMBER(19) for integer fields. + # SmallIntegerField uses NUMBER(11) instead of NUMBER(5), which is used by + # SmallAutoField, to preserve backward compatibility. + integer_field_ranges = { + "SmallIntegerField": (-99999999999, 99999999999), + "IntegerField": (-99999999999, 99999999999), + "BigIntegerField": (-9999999999999999999, 9999999999999999999), + "PositiveBigIntegerField": (0, 9999999999999999999), + "PositiveSmallIntegerField": (0, 99999999999), + "PositiveIntegerField": (0, 99999999999), + "SmallAutoField": (-99999, 99999), + "AutoField": (-99999999999, 99999999999), + "BigAutoField": (-9999999999999999999, 9999999999999999999), + } + set_operators = {**BaseDatabaseOperations.set_operators, "difference": "MINUS"} + + # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. + _sequence_reset_sql = """ +DECLARE + table_value integer; + seq_value integer; + seq_name user_tab_identity_cols.sequence_name%%TYPE; +BEGIN + BEGIN + SELECT sequence_name INTO seq_name FROM user_tab_identity_cols + WHERE table_name = '%(table_name)s' AND + column_name = '%(column_name)s'; + EXCEPTION WHEN NO_DATA_FOUND THEN + seq_name := '%(no_autofield_sequence_name)s'; + END; + + SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; + SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences + WHERE sequence_name = seq_name; + WHILE table_value > seq_value LOOP + EXECUTE IMMEDIATE 'SELECT "'||seq_name||'".nextval FROM DUAL' + INTO seq_value; + END LOOP; +END; +/""" + + # Oracle doesn't support string without precision; use the max string size. + cast_char_field_without_max_length = "NVARCHAR2(2000)" + cast_data_types = { + "AutoField": "NUMBER(11)", + "BigAutoField": "NUMBER(19)", + "SmallAutoField": "NUMBER(5)", + "TextField": cast_char_field_without_max_length, + } + + def cache_key_culling_sql(self): + cache_key = self.quote_name("cache_key") + return ( + f"SELECT {cache_key} " + f"FROM %s " + f"ORDER BY {cache_key} OFFSET %%s ROWS FETCH FIRST 1 ROWS ONLY" + ) + + # EXTRACT format cannot be passed in parameters. + _extract_format_re = _lazy_re_compile(r"[A-Z_]+") + + def date_extract_sql(self, lookup_type, sql, params): + extract_sql = f"TO_CHAR({sql}, %s)" + extract_param = None + if lookup_type == "week_day": + # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
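+            # Django's week_day lookup uses the same 1 (Sunday) to 7
+            # (Saturday) numbering, so the value maps directly.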
+            extract_param = "D"
+        elif lookup_type == "iso_week_day":
+            extract_sql = f"TO_CHAR({sql} - 1, %s)"
+            extract_param = "D"
+        elif lookup_type == "week":
+            # IW = ISO week number
+            extract_param = "IW"
+        elif lookup_type == "quarter":
+            extract_param = "Q"
+        elif lookup_type == "iso_year":
+            extract_param = "IYYY"
+        else:
+            lookup_type = lookup_type.upper()
+            if not self._extract_format_re.fullmatch(lookup_type):
+                raise ValueError(f"Invalid lookup type: {lookup_type!r}")
+            # https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/EXTRACT-datetime.html
+            return f"EXTRACT({lookup_type} FROM {sql})", params
+        return extract_sql, (*params, extract_param)
+
+    def date_trunc_sql(self, lookup_type, sql, params, tzname=None):
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        # https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/ROUND-and-TRUNC-Date-Functions.html
+        trunc_param = None
+        if lookup_type in ("year", "month"):
+            trunc_param = lookup_type.upper()
+        elif lookup_type == "quarter":
+            trunc_param = "Q"
+        elif lookup_type == "week":
+            trunc_param = "IW"
+        else:
+            return f"TRUNC({sql})", params
+        return f"TRUNC({sql}, %s)", (*params, trunc_param)
+
+    # Oracle crashes with "ORA-03113: end-of-file on communication channel"
+    # if the time zone name is passed as a parameter. Use interpolation
+    # instead.
+    # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
+    # This regexp matches all time zone names from the zoneinfo database.
+    _tzname_re = _lazy_re_compile(r"^[\w/:+-]+$")
+
+    def _prepare_tzname_delta(self, tzname):
+        tzname, sign, offset = split_tzname_delta(tzname)
+        return f"{sign}{offset}" if offset else tzname
+
+    def _convert_sql_to_tz(self, sql, params, tzname):
+        if not (settings.USE_TZ and tzname):
+            return sql, params
+        if not self._tzname_re.match(tzname):
+            raise ValueError("Invalid time zone name: %s" % tzname)
+        # Convert from the connection timezone to the local time, returning
+        # TIMESTAMP WITH TIME ZONE, then cast it back to TIMESTAMP to strip
+        # the TIME ZONE details.
+        if self.connection.timezone_name != tzname:
+            from_timezone_name = self.connection.timezone_name
+            to_timezone_name = self._prepare_tzname_delta(tzname)
+            return (
+                f"CAST((FROM_TZ({sql}, '{from_timezone_name}') AT TIME ZONE "
+                f"'{to_timezone_name}') AS TIMESTAMP)",
+                params,
+            )
+        return sql, params
+
+    def datetime_cast_date_sql(self, sql, params, tzname):
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        return f"TRUNC({sql})", params
+
+    def datetime_cast_time_sql(self, sql, params, tzname):
+        # Since `TimeField` values are stored as TIMESTAMP, change to the
+        # default date and convert the field to the specified timezone.
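+        # The 1900-01-01 literal below matches the fixed date part that
+        # adapt_timefield_value() stores for TimeField values.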
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        convert_datetime_sql = (
+            f"TO_TIMESTAMP(CONCAT('1900-01-01 ', TO_CHAR({sql}, 'HH24:MI:SS.FF')), "
+            f"'YYYY-MM-DD HH24:MI:SS.FF')"
+        )
+        return (
+            f"CASE WHEN {sql} IS NOT NULL THEN {convert_datetime_sql} ELSE NULL END",
+            (*params, *params),
+        )
+
+    def datetime_extract_sql(self, lookup_type, sql, params, tzname):
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        return self.date_extract_sql(lookup_type, sql, params)
+
+    def datetime_trunc_sql(self, lookup_type, sql, params, tzname):
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        # https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/ROUND-and-TRUNC-Date-Functions.html
+        trunc_param = None
+        if lookup_type in ("year", "month"):
+            trunc_param = lookup_type.upper()
+        elif lookup_type == "quarter":
+            trunc_param = "Q"
+        elif lookup_type == "week":
+            trunc_param = "IW"
+        elif lookup_type == "hour":
+            trunc_param = "HH24"
+        elif lookup_type == "minute":
+            trunc_param = "MI"
+        elif lookup_type == "day":
+            return f"TRUNC({sql})", params
+        else:
+            # Cast to DATE removes sub-second precision.
+            return f"CAST({sql} AS DATE)", params
+        return f"TRUNC({sql}, %s)", (*params, trunc_param)
+
+    def time_trunc_sql(self, lookup_type, sql, params, tzname=None):
+        # The implementation is similar to `datetime_trunc_sql` as both
+        # `DateTimeField` and `TimeField` are stored as TIMESTAMP, where
+        # the date part of the latter is ignored.
+        sql, params = self._convert_sql_to_tz(sql, params, tzname)
+        trunc_param = None
+        if lookup_type == "hour":
+            trunc_param = "HH24"
+        elif lookup_type == "minute":
+            trunc_param = "MI"
+        elif lookup_type == "second":
+            # Cast to DATE removes sub-second precision.
+            return f"CAST({sql} AS DATE)", params
+        return f"TRUNC({sql}, %s)", (*params, trunc_param)
+
+    def get_db_converters(self, expression):
+        converters = super().get_db_converters(expression)
+        internal_type = expression.output_field.get_internal_type()
+        if internal_type in ["JSONField", "TextField"]:
+            converters.append(self.convert_textfield_value)
+        elif internal_type == "BinaryField":
+            converters.append(self.convert_binaryfield_value)
+        elif internal_type == "BooleanField":
+            converters.append(self.convert_booleanfield_value)
+        elif internal_type == "DateTimeField":
+            if settings.USE_TZ:
+                converters.append(self.convert_datetimefield_value)
+        elif internal_type == "DateField":
+            converters.append(self.convert_datefield_value)
+        elif internal_type == "TimeField":
+            converters.append(self.convert_timefield_value)
+        elif internal_type == "UUIDField":
+            converters.append(self.convert_uuidfield_value)
+        # Oracle stores empty strings as null. If the field accepts the empty
+        # string, undo this to adhere to the Django convention of using
+        # the empty string instead of null.
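+        # BinaryField values need b"" rather than "", hence the two separate
+        # converters below.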
+ if expression.output_field.empty_strings_allowed: + converters.append( + self.convert_empty_bytes + if internal_type == "BinaryField" + else self.convert_empty_string + ) + return converters + + def convert_textfield_value(self, value, expression, connection): + if isinstance(value, Database.LOB): + value = value.read() + return value + + def convert_binaryfield_value(self, value, expression, connection): + if isinstance(value, Database.LOB): + value = force_bytes(value.read()) + return value + + def convert_booleanfield_value(self, value, expression, connection): + if value in (0, 1): + value = bool(value) + return value + + # cx_Oracle always returns datetime.datetime objects for + # DATE and TIMESTAMP columns, but Django wants to see a + # python datetime.date, .time, or .datetime. + + def convert_datetimefield_value(self, value, expression, connection): + if value is not None: + value = timezone.make_aware(value, self.connection.timezone) + return value + + def convert_datefield_value(self, value, expression, connection): + if isinstance(value, Database.Timestamp): + value = value.date() + return value + + def convert_timefield_value(self, value, expression, connection): + if isinstance(value, Database.Timestamp): + value = value.time() + return value + + def convert_uuidfield_value(self, value, expression, connection): + if value is not None: + value = uuid.UUID(value) + return value + + @staticmethod + def convert_empty_string(value, expression, connection): + return "" if value is None else value + + @staticmethod + def convert_empty_bytes(value, expression, connection): + return b"" if value is None else value + + def deferrable_sql(self): + return " DEFERRABLE INITIALLY DEFERRED" + + def fetch_returned_insert_columns(self, cursor, returning_params): + columns = [] + for param in returning_params: + value = param.get_value() + if value == []: + raise DatabaseError( + "The database did not return a new row id. Probably " + '"ORA-1403: no data found" was raised internally but was ' + "hidden by the Oracle OCI library (see " + "https://code.djangoproject.com/ticket/28859)." + ) + columns.append(value[0]) + return tuple(columns) + + def no_limit_value(self): + return None + + def limit_offset_sql(self, low_mark, high_mark): + fetch, offset = self._get_limit_offset_params(low_mark, high_mark) + return " ".join( + sql + for sql in ( + ("OFFSET %d ROWS" % offset) if offset else None, + ("FETCH FIRST %d ROWS ONLY" % fetch) if fetch else None, + ) + if sql + ) + + def last_executed_query(self, cursor, sql, params): + # https://cx-oracle.readthedocs.io/en/latest/api_manual/cursor.html#Cursor.statement + # The DB API definition does not define this attribute. + statement = cursor.statement + # Unlike Psycopg's `query` and MySQLdb`'s `_executed`, cx_Oracle's + # `statement` doesn't contain the query parameters. Substitute + # parameters manually. 
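+        # This substitution is best-effort and intended for debugging; the
+        # result is not guaranteed to be valid, executable SQL.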
+        if params:
+            if isinstance(params, (tuple, list)):
+                params = {
+                    f":arg{i}": param for i, param in enumerate(dict.fromkeys(params))
+                }
+            elif isinstance(params, dict):
+                params = {f":{key}": val for (key, val) in params.items()}
+            for key in sorted(params, key=len, reverse=True):
+                statement = statement.replace(
+                    key, force_str(params[key], errors="replace")
+                )
+        return statement
+
+    def last_insert_id(self, cursor, table_name, pk_name):
+        sq_name = self._get_sequence_name(cursor, strip_quotes(table_name), pk_name)
+        cursor.execute('SELECT "%s".currval FROM DUAL' % sq_name)
+        return cursor.fetchone()[0]
+
+    def lookup_cast(self, lookup_type, internal_type=None):
+        if lookup_type in ("iexact", "icontains", "istartswith", "iendswith"):
+            return "UPPER(%s)"
+        if (
+            lookup_type != "isnull" and internal_type in ("BinaryField", "TextField")
+        ) or (lookup_type == "exact" and internal_type == "JSONField"):
+            return "DBMS_LOB.SUBSTR(%s)"
+        return "%s"
+
+    def max_in_list_size(self):
+        return 1000
+
+    def max_name_length(self):
+        return 30
+
+    def pk_default_value(self):
+        return "NULL"
+
+    def prep_for_iexact_query(self, x):
+        return x
+
+    def process_clob(self, value):
+        if value is None:
+            return ""
+        return value.read()
+
+    def quote_name(self, name):
+        # SQL92 requires delimited (quoted) names to be case-sensitive. When
+        # not quoted, Oracle has case-insensitive behavior for identifiers, but
+        # always defaults to uppercase.
+        # We simplify things by making Oracle identifiers always uppercase.
+        if not name.startswith('"') and not name.endswith('"'):
+            name = '"%s"' % truncate_name(name, self.max_name_length())
+        # Oracle puts the query text into a (query % args) construct, so % signs
+        # in names need to be escaped. The '%%' will be collapsed back to '%' at
+        # that stage so we aren't really making the name longer here.
+ name = name.replace("%", "%%") + return name.upper() + + def regex_lookup(self, lookup_type): + if lookup_type == "regex": + match_option = "'c'" + else: + match_option = "'i'" + return "REGEXP_LIKE(%%s, %%s, %s)" % match_option + + def return_insert_columns(self, fields): + if not fields: + return "", () + field_names = [] + params = [] + for field in fields: + field_names.append( + "%s.%s" + % ( + self.quote_name(field.model._meta.db_table), + self.quote_name(field.column), + ) + ) + params.append(InsertVar(field)) + return "RETURNING %s INTO %s" % ( + ", ".join(field_names), + ", ".join(["%s"] * len(params)), + ), tuple(params) + + def __foreign_key_constraints(self, table_name, recursive): + with self.connection.cursor() as cursor: + if recursive: + cursor.execute( + """ + SELECT + user_tables.table_name, rcons.constraint_name + FROM + user_tables + JOIN + user_constraints cons + ON (user_tables.table_name = cons.table_name + AND cons.constraint_type = ANY('P', 'U')) + LEFT JOIN + user_constraints rcons + ON (user_tables.table_name = rcons.table_name + AND rcons.constraint_type = 'R') + START WITH user_tables.table_name = UPPER(%s) + CONNECT BY + NOCYCLE PRIOR cons.constraint_name = rcons.r_constraint_name + GROUP BY + user_tables.table_name, rcons.constraint_name + HAVING user_tables.table_name != UPPER(%s) + ORDER BY MAX(level) DESC + """, + (table_name, table_name), + ) + else: + cursor.execute( + """ + SELECT + cons.table_name, cons.constraint_name + FROM + user_constraints cons + WHERE + cons.constraint_type = 'R' + AND cons.table_name = UPPER(%s) + """, + (table_name,), + ) + return cursor.fetchall() + + @cached_property + def _foreign_key_constraints(self): + # 512 is large enough to fit the ~330 tables (as of this writing) in + # Django's test suite. + return lru_cache(maxsize=512)(self.__foreign_key_constraints) + + def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): + if not tables: + return [] + + truncated_tables = {table.upper() for table in tables} + constraints = set() + # Oracle's TRUNCATE CASCADE only works with ON DELETE CASCADE foreign + # keys which Django doesn't define. Emulate the PostgreSQL behavior + # which truncates all dependent tables by manually retrieving all + # foreign key constraints and resolving dependencies. 
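+        # The generated SQL below disables each foreign key constraint,
+        # truncates the tables, and re-enables the constraints afterwards, so
+        # rows can be removed regardless of ordering.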
+ for table in tables: + for foreign_table, constraint in self._foreign_key_constraints( + table, recursive=allow_cascade + ): + if allow_cascade: + truncated_tables.add(foreign_table) + constraints.add((foreign_table, constraint)) + sql = ( + [ + "%s %s %s %s %s %s %s %s;" + % ( + style.SQL_KEYWORD("ALTER"), + style.SQL_KEYWORD("TABLE"), + style.SQL_FIELD(self.quote_name(table)), + style.SQL_KEYWORD("DISABLE"), + style.SQL_KEYWORD("CONSTRAINT"), + style.SQL_FIELD(self.quote_name(constraint)), + style.SQL_KEYWORD("KEEP"), + style.SQL_KEYWORD("INDEX"), + ) + for table, constraint in constraints + ] + + [ + "%s %s %s;" + % ( + style.SQL_KEYWORD("TRUNCATE"), + style.SQL_KEYWORD("TABLE"), + style.SQL_FIELD(self.quote_name(table)), + ) + for table in truncated_tables + ] + + [ + "%s %s %s %s %s %s;" + % ( + style.SQL_KEYWORD("ALTER"), + style.SQL_KEYWORD("TABLE"), + style.SQL_FIELD(self.quote_name(table)), + style.SQL_KEYWORD("ENABLE"), + style.SQL_KEYWORD("CONSTRAINT"), + style.SQL_FIELD(self.quote_name(constraint)), + ) + for table, constraint in constraints + ] + ) + if reset_sequences: + sequences = [ + sequence + for sequence in self.connection.introspection.sequence_list() + if sequence["table"].upper() in truncated_tables + ] + # Since we've just deleted all the rows, running our sequence ALTER + # code will reset the sequence to 0. + sql.extend(self.sequence_reset_by_name_sql(style, sequences)) + return sql + + def sequence_reset_by_name_sql(self, style, sequences): + sql = [] + for sequence_info in sequences: + no_autofield_sequence_name = self._get_no_autofield_sequence_name( + sequence_info["table"] + ) + table = self.quote_name(sequence_info["table"]) + column = self.quote_name(sequence_info["column"] or "id") + query = self._sequence_reset_sql % { + "no_autofield_sequence_name": no_autofield_sequence_name, + "table": table, + "column": column, + "table_name": strip_quotes(table), + "column_name": strip_quotes(column), + } + sql.append(query) + return sql + + def sequence_reset_sql(self, style, model_list): + output = [] + query = self._sequence_reset_sql + for model in model_list: + for f in model._meta.local_fields: + if isinstance(f, AutoField): + no_autofield_sequence_name = self._get_no_autofield_sequence_name( + model._meta.db_table + ) + table = self.quote_name(model._meta.db_table) + column = self.quote_name(f.column) + output.append( + query + % { + "no_autofield_sequence_name": no_autofield_sequence_name, + "table": table, + "column": column, + "table_name": strip_quotes(table), + "column_name": strip_quotes(column), + } + ) + # Only one AutoField is allowed per model, so don't + # continue to loop + break + return output + + def start_transaction_sql(self): + return "" + + def tablespace_sql(self, tablespace, inline=False): + if inline: + return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) + else: + return "TABLESPACE %s" % self.quote_name(tablespace) + + def adapt_datefield_value(self, value): + """ + Transform a date value to an object compatible with what is expected + by the backend driver for date columns. + The default implementation transforms the date to text, but that is not + necessary for Oracle. + """ + return value + + def adapt_datetimefield_value(self, value): + """ + Transform a datetime value to an object compatible with what is expected + by the backend driver for datetime columns. + + If naive datetime is passed assumes that is in UTC. 
Django's
+        models.DateTimeField normally ensures that, when USE_TZ is True, the
+        datetime passed in is already timezone-aware.
+        """
+
+        if value is None:
+            return None
+
+        # Expression values are adapted by the database.
+        if hasattr(value, "resolve_expression"):
+            return value
+
+        # cx_Oracle doesn't support tz-aware datetimes
+        if timezone.is_aware(value):
+            if settings.USE_TZ:
+                value = timezone.make_naive(value, self.connection.timezone)
+            else:
+                raise ValueError(
+                    "Oracle backend does not support timezone-aware datetimes when "
+                    "USE_TZ is False."
+                )
+
+        return Oracle_datetime.from_datetime(value)
+
+    def adapt_timefield_value(self, value):
+        if value is None:
+            return None
+
+        # Expression values are adapted by the database.
+        if hasattr(value, "resolve_expression"):
+            return value
+
+        if isinstance(value, str):
+            return datetime.datetime.strptime(value, "%H:%M:%S")
+
+        # Oracle doesn't support tz-aware times
+        if timezone.is_aware(value):
+            raise ValueError("Oracle backend does not support timezone-aware times.")
+
+        return Oracle_datetime(
+            1900, 1, 1, value.hour, value.minute, value.second, value.microsecond
+        )
+
+    def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
+        return value
+
+    def combine_expression(self, connector, sub_expressions):
+        lhs, rhs = sub_expressions
+        if connector == "%%":
+            return "MOD(%s)" % ",".join(sub_expressions)
+        elif connector == "&":
+            return "BITAND(%s)" % ",".join(sub_expressions)
+        elif connector == "|":
+            return "BITAND(-%(lhs)s-1,%(rhs)s)+%(lhs)s" % {"lhs": lhs, "rhs": rhs}
+        elif connector == "<<":
+            return "(%(lhs)s * POWER(2, %(rhs)s))" % {"lhs": lhs, "rhs": rhs}
+        elif connector == ">>":
+            return "FLOOR(%(lhs)s / POWER(2, %(rhs)s))" % {"lhs": lhs, "rhs": rhs}
+        elif connector == "^":
+            return "POWER(%s)" % ",".join(sub_expressions)
+        elif connector == "#":
+            raise NotSupportedError("Bitwise XOR is not supported in Oracle.")
+        return super().combine_expression(connector, sub_expressions)
+
+    def _get_no_autofield_sequence_name(self, table):
+        """
+        Manually created sequence name to keep backward compatibility for
+        AutoFields that aren't Oracle identity columns.
+        """
+        name_length = self.max_name_length() - 3
+        return "%s_SQ" % truncate_name(strip_quotes(table), name_length).upper()
+
+    def _get_sequence_name(self, cursor, table, pk_name):
+        cursor.execute(
+            """
+            SELECT sequence_name
+            FROM user_tab_identity_cols
+            WHERE table_name = UPPER(%s)
+            AND column_name = UPPER(%s)""",
+            [table, pk_name],
+        )
+        row = cursor.fetchone()
+        return self._get_no_autofield_sequence_name(table) if row is None else row[0]
+
+    def bulk_insert_sql(self, fields, placeholder_rows):
+        query = []
+        for row in placeholder_rows:
+            select = []
+            for i, placeholder in enumerate(row):
+                # A model without any fields has fields=[None].
+                if fields[i]:
+                    internal_type = getattr(
+                        fields[i], "target_field", fields[i]
+                    ).get_internal_type()
+                    placeholder = (
+                        BulkInsertMapper.types.get(internal_type, "%s") % placeholder
+                    )
+                # Add column aliases to the first select to avoid "ORA-00918:
+                # column ambiguously defined" when two or more columns in the
+                # first select have the same value.
+                if not query:
+                    placeholder = "%s col_%s" % (placeholder, i)
+                select.append(placeholder)
+            query.append("SELECT %s FROM DUAL" % ", ".join(select))
+        # Bulk insert to tables with Oracle identity columns causes Oracle to
+        # add sequence.nextval to the query. Sequence.nextval cannot be used
+        # with the UNION operator. To prevent incorrect SQL, move UNION to a
+        # subquery.
+ return "SELECT * FROM (%s)" % " UNION ALL ".join(query) + + def subtract_temporals(self, internal_type, lhs, rhs): + if internal_type == "DateField": + lhs_sql, lhs_params = lhs + rhs_sql, rhs_params = rhs + params = (*lhs_params, *rhs_params) + return ( + "NUMTODSINTERVAL(TO_NUMBER(%s - %s), 'DAY')" % (lhs_sql, rhs_sql), + params, + ) + return super().subtract_temporals(internal_type, lhs, rhs) + + def bulk_batch_size(self, fields, objs): + """Oracle restricts the number of parameters in a query.""" + if fields: + return self.connection.features.max_query_params // len(fields) + return len(objs) + + def conditional_expression_supported_in_where_clause(self, expression): + """ + Oracle supports only EXISTS(...) or filters in the WHERE clause, others + must be compared with True. + """ + if isinstance(expression, (Exists, Lookup, WhereNode)): + return True + if isinstance(expression, ExpressionWrapper) and expression.conditional: + return self.conditional_expression_supported_in_where_clause( + expression.expression + ) + if isinstance(expression, RawSQL) and expression.conditional: + return True + return False diff --git a/testbed/django__django/django/db/backends/oracle/schema.py b/testbed/django__django/django/db/backends/oracle/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..c8dd64650fcbc73535559762e0d48d8ba12caf62 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/schema.py @@ -0,0 +1,252 @@ +import copy +import datetime +import re + +from django.db import DatabaseError +from django.db.backends.base.schema import ( + BaseDatabaseSchemaEditor, + _related_non_m2m_objects, +) +from django.utils.duration import duration_iso_string + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" + sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s" + sql_alter_column_null = "MODIFY %(column)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s NOT NULL" + sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s" + sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL" + sql_alter_column_no_default_null = sql_alter_column_no_default + + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" + sql_create_column_inline_fk = ( + "CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s" + ) + sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS" + sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s" + + def quote_value(self, value): + if isinstance(value, (datetime.date, datetime.time, datetime.datetime)): + return "'%s'" % value + elif isinstance(value, datetime.timedelta): + return "'%s'" % duration_iso_string(value) + elif isinstance(value, str): + return "'%s'" % value.replace("'", "''") + elif isinstance(value, (bytes, bytearray, memoryview)): + return "'%s'" % value.hex() + elif isinstance(value, bool): + return "1" if value else "0" + else: + return str(value) + + def remove_field(self, model, field): + # If the column is an identity column, drop the identity before + # removing the field. + if self._is_identity_column(model._meta.db_table, field.column): + self._drop_identity(model._meta.db_table, field.column) + super().remove_field(model, field) + + def delete_model(self, model): + # Run superclass action + super().delete_model(model) + # Clean up manually created sequence. 
+        self.execute(
+            """
+            DECLARE
+                i INTEGER;
+            BEGIN
+                SELECT COUNT(1) INTO i FROM USER_SEQUENCES
+                    WHERE SEQUENCE_NAME = '%(sq_name)s';
+                IF i = 1 THEN
+                    EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
+                END IF;
+            END;
+            /"""
+            % {
+                "sq_name": self.connection.ops._get_no_autofield_sequence_name(
+                    model._meta.db_table
+                )
+            }
+        )
+
+    def alter_field(self, model, old_field, new_field, strict=False):
+        try:
+            super().alter_field(model, old_field, new_field, strict)
+        except DatabaseError as e:
+            description = str(e)
+            # If we're changing type to an unsupported type we need a
+            # SQLite-ish workaround
+            if "ORA-22858" in description or "ORA-22859" in description:
+                self._alter_field_type_workaround(model, old_field, new_field)
+            # If an identity column is changing to a non-numeric type, drop the
+            # identity first.
+            elif "ORA-30675" in description:
+                self._drop_identity(model._meta.db_table, old_field.column)
+                self.alter_field(model, old_field, new_field, strict)
+            # If a primary key column is changing to an identity column, drop
+            # the primary key first.
+            elif "ORA-30673" in description and old_field.primary_key:
+                self._delete_primary_key(model, strict=True)
+                self._alter_field_type_workaround(model, old_field, new_field)
+            # If a collation is changing on a primary key, drop the primary key
+            # first.
+            elif "ORA-43923" in description and old_field.primary_key:
+                self._delete_primary_key(model, strict=True)
+                self.alter_field(model, old_field, new_field, strict)
+                # Restore a primary key, if needed.
+                if new_field.primary_key:
+                    self.execute(self._create_primary_key_sql(model, new_field))
+            else:
+                raise
+
+    def _alter_field_type_workaround(self, model, old_field, new_field):
+        """
+        Oracle refuses to change some column types directly to certain other
+        types. What we need to do instead is:
+        - Add a nullable version of the desired field with a temporary name. If
+          the new column is an auto field, then the temporary column can't be
+          nullable.
+        - Update the table to transfer values from old to new
+        - Drop old column
+        - Rename the new column and possibly drop the nullable property
+        """
+        # Make a new field that's like the new one but with a temporary
+        # column name.
+        new_temp_field = copy.deepcopy(new_field)
+        new_temp_field.null = new_field.get_internal_type() not in (
+            "AutoField",
+            "BigAutoField",
+            "SmallAutoField",
+        )
+        new_temp_field.column = self._generate_temp_name(new_field.column)
+        # Add it
+        self.add_field(model, new_temp_field)
+        # Explicit data type conversion
+        # https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf
+        # /Data-Type-Comparison-Rules.html#GUID-D0C5A47E-6F93-4C2D-9E49-4F2B86B359DD
+        new_value = self.quote_name(old_field.column)
+        old_type = old_field.db_type(self.connection)
+        if re.match("^N?CLOB", old_type):
+            new_value = "TO_CHAR(%s)" % new_value
+            old_type = "VARCHAR2"
+        if re.match("^N?VARCHAR2", old_type):
+            new_internal_type = new_field.get_internal_type()
+            if new_internal_type == "DateField":
+                new_value = "TO_DATE(%s, 'YYYY-MM-DD')" % new_value
+            elif new_internal_type == "DateTimeField":
+                new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
+            elif new_internal_type == "TimeField":
+                # TimeField values are stored as TIMESTAMP with a 1900-01-01
+                # date part.
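+                # Prepend that fixed date so the TO_TIMESTAMP conversion
+                # below can parse the stored time of day.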
+ new_value = "CONCAT('1900-01-01 ', %s)" % new_value + new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value + # Transfer values across + self.execute( + "UPDATE %s set %s=%s" + % ( + self.quote_name(model._meta.db_table), + self.quote_name(new_temp_field.column), + new_value, + ) + ) + # Drop the old field + self.remove_field(model, old_field) + # Rename and possibly make the new field NOT NULL + super().alter_field(model, new_temp_field, new_field) + # Recreate foreign key (if necessary) because the old field is not + # passed to the alter_field() and data types of new_temp_field and + # new_field always match. + new_type = new_field.db_type(self.connection) + if ( + (old_field.primary_key and new_field.primary_key) + or (old_field.unique and new_field.unique) + ) and old_type != new_type: + for _, rel in _related_non_m2m_objects(new_temp_field, new_field): + if rel.field.db_constraint: + self.execute( + self._create_fk_sql(rel.related_model, rel.field, "_fk") + ) + + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): + auto_field_types = {"AutoField", "BigAutoField", "SmallAutoField"} + # Drop the identity if migrating away from AutoField. + if ( + old_field.get_internal_type() in auto_field_types + and new_field.get_internal_type() not in auto_field_types + and self._is_identity_column(model._meta.db_table, new_field.column) + ): + self._drop_identity(model._meta.db_table, new_field.column) + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + + def normalize_name(self, name): + """ + Get the properly shortened and uppercased identifier as returned by + quote_name() but without the quotes. + """ + nn = self.quote_name(name) + if nn[0] == '"' and nn[-1] == '"': + nn = nn[1:-1] + return nn + + def _generate_temp_name(self, for_name): + """Generate temporary names for workarounds that need temp columns.""" + suffix = hex(hash(for_name)).upper()[1:] + return self.normalize_name(for_name + "_" + suffix) + + def prepare_default(self, value): + # Replace % with %% as %-formatting is applied in + # FormatStylePlaceholderCursor._fix_for_params(). 
+ return self.quote_value(value).replace("%", "%%") + + def _field_should_be_indexed(self, model, field): + create_index = super()._field_should_be_indexed(model, field) + db_type = field.db_type(self.connection) + if ( + db_type is not None + and db_type.lower() in self.connection._limited_data_types + ): + return False + return create_index + + def _is_identity_column(self, table_name, column_name): + with self.connection.cursor() as cursor: + cursor.execute( + """ + SELECT + CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END + FROM user_tab_cols + WHERE table_name = %s AND + column_name = %s + """, + [self.normalize_name(table_name), self.normalize_name(column_name)], + ) + row = cursor.fetchone() + return row[0] if row else False + + def _drop_identity(self, table_name, column_name): + self.execute( + "ALTER TABLE %(table)s MODIFY %(column)s DROP IDENTITY" + % { + "table": self.quote_name(table_name), + "column": self.quote_name(column_name), + } + ) + + def _get_default_collation(self, table_name): + with self.connection.cursor() as cursor: + cursor.execute( + """ + SELECT default_collation FROM user_tables WHERE table_name = %s + """, + [self.normalize_name(table_name)], + ) + return cursor.fetchone()[0] + + def _collate_sql(self, collation, old_collation=None, table_name=None): + if collation is None and old_collation is not None: + collation = self._get_default_collation(table_name) + return super()._collate_sql(collation, old_collation, table_name) diff --git a/testbed/django__django/django/db/backends/oracle/utils.py b/testbed/django__django/django/db/backends/oracle/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8941a85967fb15f78c2255b281a658a57373359b --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/utils.py @@ -0,0 +1,97 @@ +import datetime + +from .base import Database + + +class InsertVar: + """ + A late-binding cursor variable that can be passed to Cursor.execute + as a parameter, in order to receive the id of the row created by an + insert statement. + """ + + types = { + "AutoField": int, + "BigAutoField": int, + "SmallAutoField": int, + "IntegerField": int, + "BigIntegerField": int, + "SmallIntegerField": int, + "PositiveBigIntegerField": int, + "PositiveSmallIntegerField": int, + "PositiveIntegerField": int, + "FloatField": Database.NATIVE_FLOAT, + "DateTimeField": Database.TIMESTAMP, + "DateField": Database.Date, + "DecimalField": Database.NUMBER, + } + + def __init__(self, field): + internal_type = getattr(field, "target_field", field).get_internal_type() + self.db_type = self.types.get(internal_type, str) + self.bound_param = None + + def bind_parameter(self, cursor): + self.bound_param = cursor.cursor.var(self.db_type) + return self.bound_param + + def get_value(self): + return self.bound_param.getvalue() + + +class Oracle_datetime(datetime.datetime): + """ + A datetime object, with an additional class attribute + to tell cx_Oracle to save the microseconds too. 
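+    The backend's cursor checks parameters for this input_size attribute
+    when binding, so these values bind as TIMESTAMP rather than DATE.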
+ """ + + input_size = Database.TIMESTAMP + + @classmethod + def from_datetime(cls, dt): + return Oracle_datetime( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + ) + + +class BulkInsertMapper: + BLOB = "TO_BLOB(%s)" + DATE = "TO_DATE(%s)" + INTERVAL = "CAST(%s as INTERVAL DAY(9) TO SECOND(6))" + NCLOB = "TO_NCLOB(%s)" + NUMBER = "TO_NUMBER(%s)" + TIMESTAMP = "TO_TIMESTAMP(%s)" + + types = { + "AutoField": NUMBER, + "BigAutoField": NUMBER, + "BigIntegerField": NUMBER, + "BinaryField": BLOB, + "BooleanField": NUMBER, + "DateField": DATE, + "DateTimeField": TIMESTAMP, + "DecimalField": NUMBER, + "DurationField": INTERVAL, + "FloatField": NUMBER, + "IntegerField": NUMBER, + "PositiveBigIntegerField": NUMBER, + "PositiveIntegerField": NUMBER, + "PositiveSmallIntegerField": NUMBER, + "SmallAutoField": NUMBER, + "SmallIntegerField": NUMBER, + "TextField": NCLOB, + "TimeField": TIMESTAMP, + } + + +def dsn(settings_dict): + if settings_dict["PORT"]: + host = settings_dict["HOST"].strip() or "localhost" + return Database.makedsn(host, int(settings_dict["PORT"]), settings_dict["NAME"]) + return settings_dict["NAME"] diff --git a/testbed/django__django/django/db/backends/oracle/validation.py b/testbed/django__django/django/db/backends/oracle/validation.py new file mode 100644 index 0000000000000000000000000000000000000000..4035b1208534b6098cb2d4ca1b82c694ca003f31 --- /dev/null +++ b/testbed/django__django/django/db/backends/oracle/validation.py @@ -0,0 +1,22 @@ +from django.core import checks +from django.db.backends.base.validation import BaseDatabaseValidation + + +class DatabaseValidation(BaseDatabaseValidation): + def check_field_type(self, field, field_type): + """Oracle doesn't support a database index on some data types.""" + errors = [] + if field.db_index and field_type.lower() in self.connection._limited_data_types: + errors.append( + checks.Warning( + "Oracle does not support a database index on %s columns." + % field_type, + hint=( + "An index won't be created. Silence this warning if " + "you don't care about it." + ), + obj=field, + id="fields.W162", + ) + ) + return errors diff --git a/testbed/django__django/django/db/backends/postgresql/__init__.py b/testbed/django__django/django/db/backends/postgresql/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/db/backends/postgresql/base.py b/testbed/django__django/django/db/backends/postgresql/base.py new file mode 100644 index 0000000000000000000000000000000000000000..d92ad587102c116941c5fb6a1816821086f39502 --- /dev/null +++ b/testbed/django__django/django/db/backends/postgresql/base.py @@ -0,0 +1,487 @@ +""" +PostgreSQL database backend for Django. 
+ +Requires psycopg2 >= 2.8.4 or psycopg >= 3.1.8 +""" + +import asyncio +import threading +import warnings +from contextlib import contextmanager + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.db import DatabaseError as WrappedDatabaseError +from django.db import connections +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.utils import CursorDebugWrapper as BaseCursorDebugWrapper +from django.utils.asyncio import async_unsafe +from django.utils.functional import cached_property +from django.utils.safestring import SafeString +from django.utils.version import get_version_tuple + +try: + try: + import psycopg as Database + except ImportError: + import psycopg2 as Database +except ImportError: + raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") + + +def psycopg_version(): + version = Database.__version__.split(" ", 1)[0] + return get_version_tuple(version) + + +if psycopg_version() < (2, 8, 4): + raise ImproperlyConfigured( + f"psycopg2 version 2.8.4 or newer is required; you have {Database.__version__}" + ) +if (3,) <= psycopg_version() < (3, 1, 8): + raise ImproperlyConfigured( + f"psycopg version 3.1.8 or newer is required; you have {Database.__version__}" + ) + + +from .psycopg_any import IsolationLevel, is_psycopg3 # NOQA isort:skip + +if is_psycopg3: + from psycopg import adapters, sql + from psycopg.pq import Format + + from .psycopg_any import get_adapters_template, register_tzloader + + TIMESTAMPTZ_OID = adapters.types["timestamptz"].oid + +else: + import psycopg2.extensions + import psycopg2.extras + + psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString) + psycopg2.extras.register_uuid() + + # Register support for inet[] manually so we don't have to handle the Inet() + # object on load all the time. + INETARRAY_OID = 1041 + INETARRAY = psycopg2.extensions.new_array_type( + (INETARRAY_OID,), + "INETARRAY", + psycopg2.extensions.UNICODE, + ) + psycopg2.extensions.register_type(INETARRAY) + +# Some of these import psycopg, so import them after checking if it's installed. +from .client import DatabaseClient # NOQA isort:skip +from .creation import DatabaseCreation # NOQA isort:skip +from .features import DatabaseFeatures # NOQA isort:skip +from .introspection import DatabaseIntrospection # NOQA isort:skip +from .operations import DatabaseOperations # NOQA isort:skip +from .schema import DatabaseSchemaEditor # NOQA isort:skip + + +def _get_varchar_column(data): + if data["max_length"] is None: + return "varchar" + return "varchar(%(max_length)s)" % data + + +class DatabaseWrapper(BaseDatabaseWrapper): + vendor = "postgresql" + display_name = "PostgreSQL" + # This dictionary maps Field objects to their associated PostgreSQL column + # types, as strings. Column-type strings can contain format strings; they'll + # be interpolated against the values of Field.__dict__ before being output. + # If a column type is set to None, it won't be included in the output. 
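+    # CharField is mapped to a callable so that max_length=None can fall
+    # back to an unbounded varchar.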
+ data_types = { + "AutoField": "integer", + "BigAutoField": "bigint", + "BinaryField": "bytea", + "BooleanField": "boolean", + "CharField": _get_varchar_column, + "DateField": "date", + "DateTimeField": "timestamp with time zone", + "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)", + "DurationField": "interval", + "FileField": "varchar(%(max_length)s)", + "FilePathField": "varchar(%(max_length)s)", + "FloatField": "double precision", + "IntegerField": "integer", + "BigIntegerField": "bigint", + "IPAddressField": "inet", + "GenericIPAddressField": "inet", + "JSONField": "jsonb", + "OneToOneField": "integer", + "PositiveBigIntegerField": "bigint", + "PositiveIntegerField": "integer", + "PositiveSmallIntegerField": "smallint", + "SlugField": "varchar(%(max_length)s)", + "SmallAutoField": "smallint", + "SmallIntegerField": "smallint", + "TextField": "text", + "TimeField": "time", + "UUIDField": "uuid", + } + data_type_check_constraints = { + "PositiveBigIntegerField": '"%(column)s" >= 0', + "PositiveIntegerField": '"%(column)s" >= 0', + "PositiveSmallIntegerField": '"%(column)s" >= 0', + } + data_types_suffix = { + "AutoField": "GENERATED BY DEFAULT AS IDENTITY", + "BigAutoField": "GENERATED BY DEFAULT AS IDENTITY", + "SmallAutoField": "GENERATED BY DEFAULT AS IDENTITY", + } + operators = { + "exact": "= %s", + "iexact": "= UPPER(%s)", + "contains": "LIKE %s", + "icontains": "LIKE UPPER(%s)", + "regex": "~ %s", + "iregex": "~* %s", + "gt": "> %s", + "gte": ">= %s", + "lt": "< %s", + "lte": "<= %s", + "startswith": "LIKE %s", + "endswith": "LIKE %s", + "istartswith": "LIKE UPPER(%s)", + "iendswith": "LIKE UPPER(%s)", + } + + # The patterns below are used to generate SQL pattern lookup clauses when + # the right-hand side of the lookup isn't a raw string (it might be an expression + # or the result of a bilateral transformation). + # In those cases, special characters for LIKE operators (e.g. \, *, _) should be + # escaped on database side. + # + # Note: we use str.format() here for readability as '%' is used as a wildcard for + # the LIKE operator. + pattern_esc = ( + r"REPLACE(REPLACE(REPLACE({}, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')" + ) + pattern_ops = { + "contains": "LIKE '%%' || {} || '%%'", + "icontains": "LIKE '%%' || UPPER({}) || '%%'", + "startswith": "LIKE {} || '%%'", + "istartswith": "LIKE UPPER({}) || '%%'", + "endswith": "LIKE '%%' || {}", + "iendswith": "LIKE '%%' || UPPER({})", + } + + Database = Database + SchemaEditorClass = DatabaseSchemaEditor + # Classes instantiated in __init__(). + client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + # PostgreSQL backend-specific attributes. + _named_cursor_idx = 0 + + def get_database_version(self): + """ + Return a tuple of the database's version. + E.g. for pg_version 120004, return (12, 4). + """ + return divmod(self.pg_version, 10000) + + def get_connection_params(self): + settings_dict = self.settings_dict + # None may be used to connect to the default 'postgres' db + if settings_dict["NAME"] == "" and not settings_dict.get("OPTIONS", {}).get( + "service" + ): + raise ImproperlyConfigured( + "settings.DATABASES is improperly configured. " + "Please supply the NAME or OPTIONS['service'] value." 
+ ) + if len(settings_dict["NAME"] or "") > self.ops.max_name_length(): + raise ImproperlyConfigured( + "The database name '%s' (%d characters) is longer than " + "PostgreSQL's limit of %d characters. Supply a shorter NAME " + "in settings.DATABASES." + % ( + settings_dict["NAME"], + len(settings_dict["NAME"]), + self.ops.max_name_length(), + ) + ) + if settings_dict["NAME"]: + conn_params = { + "dbname": settings_dict["NAME"], + **settings_dict["OPTIONS"], + } + elif settings_dict["NAME"] is None: + # Connect to the default 'postgres' db. + settings_dict.get("OPTIONS", {}).pop("service", None) + conn_params = {"dbname": "postgres", **settings_dict["OPTIONS"]} + else: + conn_params = {**settings_dict["OPTIONS"]} + conn_params["client_encoding"] = "UTF8" + + conn_params.pop("assume_role", None) + conn_params.pop("isolation_level", None) + server_side_binding = conn_params.pop("server_side_binding", None) + conn_params.setdefault( + "cursor_factory", + ServerBindingCursor + if is_psycopg3 and server_side_binding is True + else Cursor, + ) + if settings_dict["USER"]: + conn_params["user"] = settings_dict["USER"] + if settings_dict["PASSWORD"]: + conn_params["password"] = settings_dict["PASSWORD"] + if settings_dict["HOST"]: + conn_params["host"] = settings_dict["HOST"] + if settings_dict["PORT"]: + conn_params["port"] = settings_dict["PORT"] + if is_psycopg3: + conn_params["context"] = get_adapters_template( + settings.USE_TZ, self.timezone + ) + # Disable prepared statements by default to keep connection poolers + # working. Can be reenabled via OPTIONS in the settings dict. + conn_params["prepare_threshold"] = conn_params.pop( + "prepare_threshold", None + ) + return conn_params + + @async_unsafe + def get_new_connection(self, conn_params): + # self.isolation_level must be set: + # - after connecting to the database in order to obtain the database's + # default when no value is explicitly specified in options. + # - before calling _set_autocommit() because if autocommit is on, that + # will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT. + options = self.settings_dict["OPTIONS"] + set_isolation_level = False + try: + isolation_level_value = options["isolation_level"] + except KeyError: + self.isolation_level = IsolationLevel.READ_COMMITTED + else: + # Set the isolation level to the value from OPTIONS. + try: + self.isolation_level = IsolationLevel(isolation_level_value) + set_isolation_level = True + except ValueError: + raise ImproperlyConfigured( + f"Invalid transaction isolation level {isolation_level_value} " + f"specified. Use one of the psycopg.IsolationLevel values." + ) + connection = self.Database.connect(**conn_params) + if set_isolation_level: + connection.isolation_level = self.isolation_level + if not is_psycopg3: + # Register dummy loads() to avoid a round trip from psycopg2's + # decode to json.dumps() to json.loads(), when using a custom + # decoder in JSONField. 
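+            # A sketch of the round trip being avoided (illustrative only):
+            #
+            #     raw = '{"a": 1}'             # jsonb payload off the wire
+            #     obj = json.loads(raw)        # psycopg2's default loads()
+            #     text = json.dumps(obj)       # re-encoded so that the custom
+            #     custom_decoder.decode(text)  # decoder can parse it again
+            #
+            # Registering loads=lambda x: x hands the raw string straight
+            # through instead.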
+ psycopg2.extras.register_default_jsonb( + conn_or_curs=connection, loads=lambda x: x + ) + return connection + + def ensure_timezone(self): + if self.connection is None: + return False + conn_timezone_name = self.connection.info.parameter_status("TimeZone") + timezone_name = self.timezone_name + if timezone_name and conn_timezone_name != timezone_name: + with self.connection.cursor() as cursor: + cursor.execute(self.ops.set_time_zone_sql(), [timezone_name]) + return True + return False + + def ensure_role(self): + if self.connection is None: + return False + if new_role := self.settings_dict.get("OPTIONS", {}).get("assume_role"): + with self.connection.cursor() as cursor: + sql = self.ops.compose_sql("SET ROLE %s", [new_role]) + cursor.execute(sql) + return True + return False + + def init_connection_state(self): + super().init_connection_state() + + # Commit after setting the time zone. + commit_tz = self.ensure_timezone() + # Set the role on the connection. This is useful if the credential used + # to login is not the same as the role that owns database resources. As + # can be the case when using temporary or ephemeral credentials. + commit_role = self.ensure_role() + + if (commit_role or commit_tz) and not self.get_autocommit(): + self.connection.commit() + + @async_unsafe + def create_cursor(self, name=None): + if name: + # In autocommit mode, the cursor will be used outside of a + # transaction, hence use a holdable cursor. + cursor = self.connection.cursor( + name, scrollable=False, withhold=self.connection.autocommit + ) + else: + cursor = self.connection.cursor() + + if is_psycopg3: + # Register the cursor timezone only if the connection disagrees, to + # avoid copying the adapter map. + tzloader = self.connection.adapters.get_loader(TIMESTAMPTZ_OID, Format.TEXT) + if self.timezone != tzloader.timezone: + register_tzloader(self.timezone, cursor) + else: + cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None + return cursor + + def tzinfo_factory(self, offset): + return self.timezone + + @async_unsafe + def chunked_cursor(self): + self._named_cursor_idx += 1 + # Get the current async task + # Note that right now this is behind @async_unsafe, so this is + # unreachable, but in future we'll start loosening this restriction. + # For now, it's here so that every use of "threading" is + # also async-compatible. + try: + current_task = asyncio.current_task() + except RuntimeError: + current_task = None + # Current task can be none even if the current_task call didn't error + if current_task: + task_ident = str(id(current_task)) + else: + task_ident = "sync" + # Use that and the thread ident to get a unique name + return self._cursor( + name="_django_curs_%d_%s_%d" + % ( + # Avoid reusing name in other threads / tasks + threading.current_thread().ident, + task_ident, + self._named_cursor_idx, + ) + ) + + def _set_autocommit(self, autocommit): + with self.wrap_database_errors: + self.connection.autocommit = autocommit + + def check_constraints(self, table_names=None): + """ + Check constraints by setting them to immediate. Return them to deferred + afterward. + """ + with self.cursor() as cursor: + cursor.execute("SET CONSTRAINTS ALL IMMEDIATE") + cursor.execute("SET CONSTRAINTS ALL DEFERRED") + + def is_usable(self): + try: + # Use a psycopg cursor directly, bypassing Django's utilities. 
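+            # (The raw cursor also sidesteps Django's exception wrapping, so
+            # a failure here surfaces as Database.Error and is caught below.)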
+ with self.connection.cursor() as cursor: + cursor.execute("SELECT 1") + except Database.Error: + return False + else: + return True + + @contextmanager + def _nodb_cursor(self): + cursor = None + try: + with super()._nodb_cursor() as cursor: + yield cursor + except (Database.DatabaseError, WrappedDatabaseError): + if cursor is not None: + raise + warnings.warn( + "Normally Django will use a connection to the 'postgres' database " + "to avoid running initialization queries against the production " + "database when it's not needed (for example, when running tests). " + "Django was unable to create a connection to the 'postgres' database " + "and will use the first PostgreSQL database instead.", + RuntimeWarning, + ) + for connection in connections.all(): + if ( + connection.vendor == "postgresql" + and connection.settings_dict["NAME"] != "postgres" + ): + conn = self.__class__( + { + **self.settings_dict, + "NAME": connection.settings_dict["NAME"], + }, + alias=self.alias, + ) + try: + with conn.cursor() as cursor: + yield cursor + finally: + conn.close() + break + else: + raise + + @cached_property + def pg_version(self): + with self.temporary_connection(): + return self.connection.info.server_version + + def make_debug_cursor(self, cursor): + return CursorDebugWrapper(cursor, self) + + +if is_psycopg3: + + class CursorMixin: + """ + A subclass of psycopg cursor implementing callproc. + """ + + def callproc(self, name, args=None): + if not isinstance(name, sql.Identifier): + name = sql.Identifier(name) + + qparts = [sql.SQL("SELECT * FROM "), name, sql.SQL("(")] + if args: + for item in args: + qparts.append(sql.Literal(item)) + qparts.append(sql.SQL(",")) + del qparts[-1] + + qparts.append(sql.SQL(")")) + stmt = sql.Composed(qparts) + self.execute(stmt) + return args + + class ServerBindingCursor(CursorMixin, Database.Cursor): + pass + + class Cursor(CursorMixin, Database.ClientCursor): + pass + + class CursorDebugWrapper(BaseCursorDebugWrapper): + def copy(self, statement): + with self.debug_sql(statement): + return self.cursor.copy(statement) + +else: + Cursor = psycopg2.extensions.cursor + + class CursorDebugWrapper(BaseCursorDebugWrapper): + def copy_expert(self, sql, file, *args): + with self.debug_sql(sql): + return self.cursor.copy_expert(sql, file, *args) + + def copy_to(self, file, table, *args, **kwargs): + with self.debug_sql(sql="COPY %s TO STDOUT" % table): + return self.cursor.copy_to(file, table, *args, **kwargs) diff --git a/testbed/django__django/django/db/backends/postgresql/client.py b/testbed/django__django/django/db/backends/postgresql/client.py new file mode 100644 index 0000000000000000000000000000000000000000..3b5ddafacaaadeccb1199f898b221e9880c2eae0 --- /dev/null +++ b/testbed/django__django/django/db/backends/postgresql/client.py @@ -0,0 +1,64 @@ +import signal + +from django.db.backends.base.client import BaseDatabaseClient + + +class DatabaseClient(BaseDatabaseClient): + executable_name = "psql" + + @classmethod + def settings_to_cmd_args_env(cls, settings_dict, parameters): + args = [cls.executable_name] + options = settings_dict.get("OPTIONS", {}) + + host = settings_dict.get("HOST") + port = settings_dict.get("PORT") + dbname = settings_dict.get("NAME") + user = settings_dict.get("USER") + passwd = settings_dict.get("PASSWORD") + passfile = options.get("passfile") + service = options.get("service") + sslmode = options.get("sslmode") + sslrootcert = options.get("sslrootcert") + sslcert = options.get("sslcert") + sslkey = options.get("sslkey") + + if not 
dbname and not service: + # Connect to the default 'postgres' db. + dbname = "postgres" + if user: + args += ["-U", user] + if host: + args += ["-h", host] + if port: + args += ["-p", str(port)] + args.extend(parameters) + if dbname: + args += [dbname] + + env = {} + if passwd: + env["PGPASSWORD"] = str(passwd) + if service: + env["PGSERVICE"] = str(service) + if sslmode: + env["PGSSLMODE"] = str(sslmode) + if sslrootcert: + env["PGSSLROOTCERT"] = str(sslrootcert) + if sslcert: + env["PGSSLCERT"] = str(sslcert) + if sslkey: + env["PGSSLKEY"] = str(sslkey) + if passfile: + env["PGPASSFILE"] = str(passfile) + return args, (env or None) + + def runshell(self, parameters): + sigint_handler = signal.getsignal(signal.SIGINT) + try: + # Allow SIGINT to pass to psql to abort queries. + signal.signal(signal.SIGINT, signal.SIG_IGN) + super().runshell(parameters) + finally: + # Restore the original SIGINT handler. + signal.signal(signal.SIGINT, sigint_handler) diff --git a/testbed/django__django/django/db/backends/postgresql/creation.py b/testbed/django__django/django/db/backends/postgresql/creation.py new file mode 100644 index 0000000000000000000000000000000000000000..9b562cec18eacb20d2b9c8e124d1402c911ae956 --- /dev/null +++ b/testbed/django__django/django/db/backends/postgresql/creation.py @@ -0,0 +1,86 @@ +import sys + +from django.core.exceptions import ImproperlyConfigured +from django.db.backends.base.creation import BaseDatabaseCreation +from django.db.backends.postgresql.psycopg_any import errors +from django.db.backends.utils import strip_quotes + + +class DatabaseCreation(BaseDatabaseCreation): + def _quote_name(self, name): + return self.connection.ops.quote_name(name) + + def _get_database_create_suffix(self, encoding=None, template=None): + suffix = "" + if encoding: + suffix += " ENCODING '{}'".format(encoding) + if template: + suffix += " TEMPLATE {}".format(self._quote_name(template)) + return suffix and "WITH" + suffix + + def sql_table_creation_suffix(self): + test_settings = self.connection.settings_dict["TEST"] + if test_settings.get("COLLATION") is not None: + raise ImproperlyConfigured( + "PostgreSQL does not support collation setting at database " + "creation time." + ) + return self._get_database_create_suffix( + encoding=test_settings["CHARSET"], + template=test_settings.get("TEMPLATE"), + ) + + def _database_exists(self, cursor, database_name): + cursor.execute( + "SELECT 1 FROM pg_catalog.pg_database WHERE datname = %s", + [strip_quotes(database_name)], + ) + return cursor.fetchone() is not None + + def _execute_create_test_db(self, cursor, parameters, keepdb=False): + try: + if keepdb and self._database_exists(cursor, parameters["dbname"]): + # If the database should be kept and it already exists, don't + # try to create a new one. + return + super()._execute_create_test_db(cursor, parameters, keepdb) + except Exception as e: + if not isinstance(e.__cause__, errors.DuplicateDatabase): + # All errors except "database already exists" cancel tests. + self.log("Got an error creating the test database: %s" % e) + sys.exit(2) + elif not keepdb: + # If the database should be kept, ignore "database already + # exists". + raise + + def _clone_test_db(self, suffix, verbosity, keepdb=False): + # CREATE DATABASE ... WITH TEMPLATE ... requires closing connections + # to the template database. 
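+        # The statement issued below looks roughly like this (names are
+        # illustrative):
+        #
+        #     CREATE DATABASE "test_default_1" WITH TEMPLATE "test_default"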
+ self.connection.close() + + source_database_name = self.connection.settings_dict["NAME"] + target_database_name = self.get_test_db_clone_settings(suffix)["NAME"] + test_db_params = { + "dbname": self._quote_name(target_database_name), + "suffix": self._get_database_create_suffix(template=source_database_name), + } + with self._nodb_cursor() as cursor: + try: + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception: + try: + if verbosity >= 1: + self.log( + "Destroying old test database for alias %s..." + % ( + self._get_database_display_str( + verbosity, target_database_name + ), + ) + ) + cursor.execute("DROP DATABASE %(dbname)s" % test_db_params) + self._execute_create_test_db(cursor, test_db_params, keepdb) + except Exception as e: + self.log("Got an error cloning the test database: %s" % e) + sys.exit(2) diff --git a/testbed/django__django/django/db/backends/postgresql/introspection.py b/testbed/django__django/django/db/backends/postgresql/introspection.py new file mode 100644 index 0000000000000000000000000000000000000000..69bc8712bdb477e32361fe3bf2be2cec09681c38 --- /dev/null +++ b/testbed/django__django/django/db/backends/postgresql/introspection.py @@ -0,0 +1,299 @@ +from collections import namedtuple + +from django.db.backends.base.introspection import BaseDatabaseIntrospection +from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo +from django.db.models import Index + +FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield", "comment")) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) + + +class DatabaseIntrospection(BaseDatabaseIntrospection): + # Maps type codes to Django Field types. + data_types_reverse = { + 16: "BooleanField", + 17: "BinaryField", + 20: "BigIntegerField", + 21: "SmallIntegerField", + 23: "IntegerField", + 25: "TextField", + 700: "FloatField", + 701: "FloatField", + 869: "GenericIPAddressField", + 1042: "CharField", # blank-padded + 1043: "CharField", + 1082: "DateField", + 1083: "TimeField", + 1114: "DateTimeField", + 1184: "DateTimeField", + 1186: "DurationField", + 1266: "TimeField", + 1700: "DecimalField", + 2950: "UUIDField", + 3802: "JSONField", + } + # A hook for subclasses. + index_default_access_method = "btree" + + ignored_tables = [] + + def get_field_type(self, data_type, description): + field_type = super().get_field_type(data_type, description) + if description.is_autofield or ( + # Required for pre-Django 4.1 serial columns. 
+ description.default + and "nextval" in description.default + ): + if field_type == "IntegerField": + return "AutoField" + elif field_type == "BigIntegerField": + return "BigAutoField" + elif field_type == "SmallIntegerField": + return "SmallAutoField" + return field_type + + def get_table_list(self, cursor): + """Return a list of table and view names in the current database.""" + cursor.execute( + """ + SELECT + c.relname, + CASE + WHEN c.relispartition THEN 'p' + WHEN c.relkind IN ('m', 'v') THEN 'v' + ELSE 't' + END, + obj_description(c.oid, 'pg_class') + FROM pg_catalog.pg_class c + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') + AND n.nspname NOT IN ('pg_catalog', 'pg_toast') + AND pg_catalog.pg_table_is_visible(c.oid) + """ + ) + return [ + TableInfo(*row) + for row in cursor.fetchall() + if row[0] not in self.ignored_tables + ] + + def get_table_description(self, cursor, table_name): + """ + Return a description of the table with the DB-API cursor.description + interface. + """ + # Query the pg_catalog tables as cursor.description does not reliably + # return the nullable property and information_schema.columns does not + # contain details of materialized views. + cursor.execute( + """ + SELECT + a.attname AS column_name, + NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable, + pg_get_expr(ad.adbin, ad.adrelid) AS column_default, + CASE WHEN collname = 'default' THEN NULL ELSE collname END AS collation, + a.attidentity != '' AS is_autofield, + col_description(a.attrelid, a.attnum) AS column_comment + FROM pg_attribute a + LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum + LEFT JOIN pg_collation co ON a.attcollation = co.oid + JOIN pg_type t ON a.atttypid = t.oid + JOIN pg_class c ON a.attrelid = c.oid + JOIN pg_namespace n ON c.relnamespace = n.oid + WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') + AND c.relname = %s + AND n.nspname NOT IN ('pg_catalog', 'pg_toast') + AND pg_catalog.pg_table_is_visible(c.oid) + """, + [table_name], + ) + field_map = {line[0]: line[1:] for line in cursor.fetchall()} + cursor.execute( + "SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name) + ) + return [ + FieldInfo( + line.name, + line.type_code, + # display_size is always None on psycopg2. + line.internal_size if line.display_size is None else line.display_size, + line.internal_size, + line.precision, + line.scale, + *field_map[line.name], + ) + for line in cursor.description + ] + + def get_sequences(self, cursor, table_name, table_fields=()): + cursor.execute( + """ + SELECT + s.relname AS sequence_name, + a.attname AS colname + FROM + pg_class s + JOIN pg_depend d ON d.objid = s.oid + AND d.classid = 'pg_class'::regclass + AND d.refclassid = 'pg_class'::regclass + JOIN pg_attribute a ON d.refobjid = a.attrelid + AND d.refobjsubid = a.attnum + JOIN pg_class tbl ON tbl.oid = d.refobjid + AND tbl.relname = %s + AND pg_catalog.pg_table_is_visible(tbl.oid) + WHERE + s.relkind = 'S'; + """, + [table_name], + ) + return [ + {"name": row[0], "table": table_name, "column": row[1]} + for row in cursor.fetchall() + ] + + def get_relations(self, cursor, table_name): + """ + Return a dictionary of {field_name: (field_name_other_table, other_table)} + representing all foreign keys in the given table. 
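+        For example: {'author_id': ('id', 'myapp_author')}.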
+ """ + cursor.execute( + """ + SELECT a1.attname, c2.relname, a2.attname + FROM pg_constraint con + LEFT JOIN pg_class c1 ON con.conrelid = c1.oid + LEFT JOIN pg_class c2 ON con.confrelid = c2.oid + LEFT JOIN + pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1] + LEFT JOIN + pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1] + WHERE + c1.relname = %s AND + con.contype = 'f' AND + c1.relnamespace = c2.relnamespace AND + pg_catalog.pg_table_is_visible(c1.oid) + """, + [table_name], + ) + return {row[0]: (row[2], row[1]) for row in cursor.fetchall()} + + def get_constraints(self, cursor, table_name): + """ + Retrieve any constraints or keys (unique, pk, fk, check, index) across + one or more columns. Also retrieve the definition of expression-based + indexes. + """ + constraints = {} + # Loop over the key table, collecting things as constraints. The column + # array must return column names in the same order in which they were + # created. + cursor.execute( + """ + SELECT + c.conname, + array( + SELECT attname + FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx) + JOIN pg_attribute AS ca ON cols.colid = ca.attnum + WHERE ca.attrelid = c.conrelid + ORDER BY cols.arridx + ), + c.contype, + (SELECT fkc.relname || '.' || fka.attname + FROM pg_attribute AS fka + JOIN pg_class AS fkc ON fka.attrelid = fkc.oid + WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]), + cl.reloptions + FROM pg_constraint AS c + JOIN pg_class AS cl ON c.conrelid = cl.oid + WHERE cl.relname = %s AND pg_catalog.pg_table_is_visible(cl.oid) + """, + [table_name], + ) + for constraint, columns, kind, used_cols, options in cursor.fetchall(): + constraints[constraint] = { + "columns": columns, + "primary_key": kind == "p", + "unique": kind in ["p", "u"], + "foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None, + "check": kind == "c", + "index": False, + "definition": None, + "options": options, + } + # Now get indexes + cursor.execute( + """ + SELECT + indexname, + array_agg(attname ORDER BY arridx), + indisunique, + indisprimary, + array_agg(ordering ORDER BY arridx), + amname, + exprdef, + s2.attoptions + FROM ( + SELECT + c2.relname as indexname, idx.*, attr.attname, am.amname, + CASE + WHEN idx.indexprs IS NOT NULL THEN + pg_get_indexdef(idx.indexrelid) + END AS exprdef, + CASE am.amname + WHEN %s THEN + CASE (option & 1) + WHEN 1 THEN 'DESC' ELSE 'ASC' + END + END as ordering, + c2.reloptions as attoptions + FROM ( + SELECT * + FROM + pg_index i, + unnest(i.indkey, i.indoption) + WITH ORDINALITY koi(key, option, arridx) + ) idx + LEFT JOIN pg_class c ON idx.indrelid = c.oid + LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid + LEFT JOIN pg_am am ON c2.relam = am.oid + LEFT JOIN + pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key + WHERE c.relname = %s AND pg_catalog.pg_table_is_visible(c.oid) + ) s2 + GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions; + """, + [self.index_default_access_method, table_name], + ) + for ( + index, + columns, + unique, + primary, + orders, + type_, + definition, + options, + ) in cursor.fetchall(): + if index not in constraints: + basic_index = ( + type_ == self.index_default_access_method + and + # '_btree' references + # django.contrib.postgres.indexes.BTreeIndex.suffix. 
+                    not index.endswith("_btree")
+                    and options is None
+                )
+                constraints[index] = {
+                    "columns": columns if columns != [None] else [],
+                    "orders": orders if orders != [None] else [],
+                    "primary_key": primary,
+                    "unique": unique,
+                    "foreign_key": None,
+                    "check": False,
+                    "index": True,
+                    "type": Index.suffix if basic_index else type_,
+                    "definition": definition,
+                    "options": options,
+                }
+        return constraints
diff --git a/testbed/django__django/django/db/backends/postgresql/psycopg_any.py b/testbed/django__django/django/db/backends/postgresql/psycopg_any.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fe6b15cafc937930341d6f645f06d824ef9d569
--- /dev/null
+++ b/testbed/django__django/django/db/backends/postgresql/psycopg_any.py
@@ -0,0 +1,103 @@
+import ipaddress
+from functools import lru_cache
+
+try:
+    from psycopg import ClientCursor, IsolationLevel, adapt, adapters, errors, sql
+    from psycopg.postgres import types
+    from psycopg.types.datetime import TimestamptzLoader
+    from psycopg.types.json import Jsonb
+    from psycopg.types.range import Range, RangeDumper
+    from psycopg.types.string import TextLoader
+
+    Inet = ipaddress.ip_address
+
+    DateRange = DateTimeRange = DateTimeTZRange = NumericRange = Range
+    RANGE_TYPES = (Range,)
+
+    TSRANGE_OID = types["tsrange"].oid
+    TSTZRANGE_OID = types["tstzrange"].oid
+
+    def mogrify(sql, params, connection):
+        with connection.cursor() as cursor:
+            return ClientCursor(cursor.connection).mogrify(sql, params)
+
+    # Adapters.
+    class BaseTzLoader(TimestamptzLoader):
+        """
+        Load a PostgreSQL timestamptz using a specific timezone.
+        The timezone can be None too, in which case it will be chopped.
+        """
+
+        timezone = None
+
+        def load(self, data):
+            res = super().load(data)
+            return res.replace(tzinfo=self.timezone)
+
+    def register_tzloader(tz, context):
+        class SpecificTzLoader(BaseTzLoader):
+            timezone = tz
+
+        context.adapters.register_loader("timestamptz", SpecificTzLoader)
+
+    class DjangoRangeDumper(RangeDumper):
+        """A Range dumper customized for Django."""
+
+        def upgrade(self, obj, format):
+            # Dump ranges containing naive datetimes as tstzrange, because
+            # Django doesn't use tz-aware ones.
+            dumper = super().upgrade(obj, format)
+            if dumper is not self and dumper.oid == TSRANGE_OID:
+                dumper.oid = TSTZRANGE_OID
+            return dumper
+
+    @lru_cache
+    def get_adapters_template(use_tz, timezone):
+        # Create an adapters map extending the base one.
+        ctx = adapt.AdaptersMap(adapters)
+        # Register a no-op loader to avoid a round trip from psycopg version 3
+        # decode to json.dumps() to json.loads(), when using a custom decoder
+        # in JSONField.
+        ctx.register_loader("jsonb", TextLoader)
+        # Don't convert automatically from PostgreSQL network types to Python
+        # ipaddress.
+        ctx.register_loader("inet", TextLoader)
+        ctx.register_loader("cidr", TextLoader)
+        ctx.register_dumper(Range, DjangoRangeDumper)
+        # Register a timestamptz loader configured on self.timezone.
+        # This, however, can be overridden by create_cursor.
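+        # Illustrative use (values are examples): the connection is then
+        # opened with this map as its adaptation context, e.g.
+        #
+        #     ctx = get_adapters_template(True, datetime.timezone.utc)
+        #     psycopg.connect(..., context=ctx)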
+ register_tzloader(timezone, ctx) + return ctx + + is_psycopg3 = True + +except ImportError: + from enum import IntEnum + + from psycopg2 import errors, extensions, sql # NOQA + from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, Inet # NOQA + from psycopg2.extras import Json as Jsonb # NOQA + from psycopg2.extras import NumericRange, Range # NOQA + + RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange) + + class IsolationLevel(IntEnum): + READ_UNCOMMITTED = extensions.ISOLATION_LEVEL_READ_UNCOMMITTED + READ_COMMITTED = extensions.ISOLATION_LEVEL_READ_COMMITTED + REPEATABLE_READ = extensions.ISOLATION_LEVEL_REPEATABLE_READ + SERIALIZABLE = extensions.ISOLATION_LEVEL_SERIALIZABLE + + def _quote(value, connection=None): + adapted = extensions.adapt(value) + if hasattr(adapted, "encoding"): + adapted.encoding = "utf8" + # getquoted() returns a quoted bytestring of the adapted value. + return adapted.getquoted().decode() + + sql.quote = _quote + + def mogrify(sql, params, connection): + with connection.cursor() as cursor: + return cursor.mogrify(sql, params).decode() + + is_psycopg3 = False diff --git a/testbed/django__django/django/db/backends/postgresql/schema.py b/testbed/django__django/django/db/backends/postgresql/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..40fef6660e8818bef201bb570b5b64bd576666e6 --- /dev/null +++ b/testbed/django__django/django/db/backends/postgresql/schema.py @@ -0,0 +1,374 @@ +from django.db.backends.base.schema import BaseDatabaseSchemaEditor +from django.db.backends.ddl_references import IndexColumns +from django.db.backends.postgresql.psycopg_any import sql +from django.db.backends.utils import strip_quotes + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + # Setting all constraints to IMMEDIATE to allow changing data in the same + # transaction. + sql_update_with_default = ( + "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" + "; SET CONSTRAINTS ALL IMMEDIATE" + ) + sql_alter_sequence_type = "ALTER SEQUENCE IF EXISTS %(sequence)s AS %(type)s" + sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE" + + sql_create_index = ( + "CREATE INDEX %(name)s ON %(table)s%(using)s " + "(%(columns)s)%(include)s%(extra)s%(condition)s" + ) + sql_create_index_concurrently = ( + "CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s " + "(%(columns)s)%(include)s%(extra)s%(condition)s" + ) + sql_delete_index = "DROP INDEX IF EXISTS %(name)s" + sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s" + + # Setting the constraint to IMMEDIATE to allow changing data in the same + # transaction. + sql_create_column_inline_fk = ( + "CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s" + "; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE" + ) + # Setting the constraint to IMMEDIATE runs any deferred checks to allow + # dropping it in the same transaction. + sql_delete_fk = ( + "SET CONSTRAINTS %(name)s IMMEDIATE; " + "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + ) + sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)" + + def execute(self, sql, params=()): + # Merge the query client-side, as PostgreSQL won't do it server-side. + if params is None: + return super().execute(sql, params) + sql = self.connection.ops.compose_sql(str(sql), params) + # Don't let the superclass touch anything. 
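+        # Illustration (hypothetical statement): after compose_sql() the
+        # superclass receives a fully interpolated string such as
+        #
+        #     ALTER TABLE "app_book" ALTER COLUMN "name" SET DEFAULT 'x'
+        #
+        # together with params=None, not a template plus a params sequence.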
+ return super().execute(sql, None) + + sql_add_identity = ( + "ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD " + "GENERATED BY DEFAULT AS IDENTITY" + ) + sql_drop_indentity = ( + "ALTER TABLE %(table)s ALTER COLUMN %(column)s DROP IDENTITY IF EXISTS" + ) + + def quote_value(self, value): + return sql.quote(value, self.connection.connection) + + def _field_indexes_sql(self, model, field): + output = super()._field_indexes_sql(model, field) + like_index_statement = self._create_like_index_sql(model, field) + if like_index_statement is not None: + output.append(like_index_statement) + return output + + def _field_data_type(self, field): + if field.is_relation: + return field.rel_db_type(self.connection) + return self.connection.data_types.get( + field.get_internal_type(), + field.db_type(self.connection), + ) + + def _field_base_data_types(self, field): + # Yield base data types for array fields. + if field.base_field.get_internal_type() == "ArrayField": + yield from self._field_base_data_types(field.base_field) + else: + yield self._field_data_type(field.base_field) + + def _create_like_index_sql(self, model, field): + """ + Return the statement to create an index with varchar operator pattern + when the column type is 'varchar' or 'text', otherwise return None. + """ + db_type = field.db_type(connection=self.connection) + if db_type is not None and (field.db_index or field.unique): + # Fields with database column types of `varchar` and `text` need + # a second index that specifies their operator class, which is + # needed when performing correct LIKE queries outside the + # C locale. See #12234. + # + # The same doesn't apply to array fields such as varchar[size] + # and text[size], so skip them. + if "[" in db_type: + return None + # Non-deterministic collations on Postgresql don't support indexes + # for operator classes varchar_pattern_ops/text_pattern_ops. + if getattr(field, "db_collation", None) or ( + field.is_relation and getattr(field.target_field, "db_collation", None) + ): + return None + if db_type.startswith("varchar"): + return self._create_index_sql( + model, + fields=[field], + suffix="_like", + opclasses=["varchar_pattern_ops"], + ) + elif db_type.startswith("text"): + return self._create_index_sql( + model, + fields=[field], + suffix="_like", + opclasses=["text_pattern_ops"], + ) + return None + + def _using_sql(self, new_field, old_field): + using_sql = " USING %(column)s::%(type)s" + new_internal_type = new_field.get_internal_type() + old_internal_type = old_field.get_internal_type() + if new_internal_type == "ArrayField" and new_internal_type == old_internal_type: + # Compare base data types for array fields. + if list(self._field_base_data_types(old_field)) != list( + self._field_base_data_types(new_field) + ): + return using_sql + elif self._field_data_type(old_field) != self._field_data_type(new_field): + return using_sql + return "" + + def _get_sequence_name(self, table, column): + with self.connection.cursor() as cursor: + for sequence in self.connection.introspection.get_sequences(cursor, table): + if sequence["column"] == column: + return sequence["name"] + return None + + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): + # Drop indexes on varchar/text/citext columns that are changing to a + # different type. 
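+        # For instance (index name is illustrative), altering a CharField to
+        # an IntegerField first issues something like
+        #
+        #     DROP INDEX IF EXISTS "app_book_title_2f3a8c_like"
+        #
+        # since the extra *_like index only makes sense for textual columns.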
+ old_db_params = old_field.db_parameters(connection=self.connection) + old_type = old_db_params["type"] + if (old_field.db_index or old_field.unique) and ( + (old_type.startswith("varchar") and not new_type.startswith("varchar")) + or (old_type.startswith("text") and not new_type.startswith("text")) + or (old_type.startswith("citext") and not new_type.startswith("citext")) + ): + index_name = self._create_index_name( + model._meta.db_table, [old_field.column], suffix="_like" + ) + self.execute(self._delete_index_sql(model, index_name)) + + self.sql_alter_column_type = ( + "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" + ) + # Cast when data type changed. + if using_sql := self._using_sql(new_field, old_field): + self.sql_alter_column_type += using_sql + new_internal_type = new_field.get_internal_type() + old_internal_type = old_field.get_internal_type() + # Make ALTER TYPE with IDENTITY make sense. + table = strip_quotes(model._meta.db_table) + auto_field_types = { + "AutoField", + "BigAutoField", + "SmallAutoField", + } + old_is_auto = old_internal_type in auto_field_types + new_is_auto = new_internal_type in auto_field_types + if new_is_auto and not old_is_auto: + column = strip_quotes(new_field.column) + return ( + ( + self.sql_alter_column_type + % { + "column": self.quote_name(column), + "type": new_type, + "collation": "", + }, + [], + ), + [ + ( + self.sql_add_identity + % { + "table": self.quote_name(table), + "column": self.quote_name(column), + }, + [], + ), + ], + ) + elif old_is_auto and not new_is_auto: + # Drop IDENTITY if exists (pre-Django 4.1 serial columns don't have + # it). + self.execute( + self.sql_drop_indentity + % { + "table": self.quote_name(table), + "column": self.quote_name(strip_quotes(new_field.column)), + } + ) + column = strip_quotes(new_field.column) + fragment, _ = super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + # Drop the sequence if exists (Django 4.1+ identity columns don't + # have it). + other_actions = [] + if sequence_name := self._get_sequence_name(table, column): + other_actions = [ + ( + self.sql_delete_sequence + % { + "sequence": self.quote_name(sequence_name), + }, + [], + ) + ] + return fragment, other_actions + elif new_is_auto and old_is_auto and old_internal_type != new_internal_type: + fragment, _ = super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + column = strip_quotes(new_field.column) + db_types = { + "AutoField": "integer", + "BigAutoField": "bigint", + "SmallAutoField": "smallint", + } + # Alter the sequence type if exists (Django 4.1+ identity columns + # don't have it). + other_actions = [] + if sequence_name := self._get_sequence_name(table, column): + other_actions = [ + ( + self.sql_alter_sequence_type + % { + "sequence": self.quote_name(sequence_name), + "type": db_types[new_internal_type], + }, + [], + ), + ] + return fragment, other_actions + else: + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) + + def _alter_column_collation_sql( + self, model, new_field, new_type, new_collation, old_field + ): + sql = self.sql_alter_column_collate + # Cast when data type changed. 
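+        # Sketch of the resulting fragment when a cast is also needed
+        # (column, type, and collation are illustrative):
+        #
+        #     ALTER COLUMN "title" TYPE text COLLATE "de-x-icu"
+        #         USING "title"::text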
+ if using_sql := self._using_sql(new_field, old_field): + sql += using_sql + return ( + sql + % { + "column": self.quote_name(new_field.column), + "type": new_type, + "collation": " " + self._collate_sql(new_collation) + if new_collation + else "", + }, + [], + ) + + def _alter_field( + self, + model, + old_field, + new_field, + old_type, + new_type, + old_db_params, + new_db_params, + strict=False, + ): + super()._alter_field( + model, + old_field, + new_field, + old_type, + new_type, + old_db_params, + new_db_params, + strict, + ) + # Added an index? Create any PostgreSQL-specific indexes. + if (not (old_field.db_index or old_field.unique) and new_field.db_index) or ( + not old_field.unique and new_field.unique + ): + like_index_statement = self._create_like_index_sql(model, new_field) + if like_index_statement is not None: + self.execute(like_index_statement) + + # Removed an index? Drop any PostgreSQL-specific indexes. + if old_field.unique and not (new_field.db_index or new_field.unique): + index_to_remove = self._create_index_name( + model._meta.db_table, [old_field.column], suffix="_like" + ) + self.execute(self._delete_index_sql(model, index_to_remove)) + + def _index_columns(self, table, columns, col_suffixes, opclasses): + if opclasses: + return IndexColumns( + table, + columns, + self.quote_name, + col_suffixes=col_suffixes, + opclasses=opclasses, + ) + return super()._index_columns(table, columns, col_suffixes, opclasses) + + def add_index(self, model, index, concurrently=False): + self.execute( + index.create_sql(model, self, concurrently=concurrently), params=None + ) + + def remove_index(self, model, index, concurrently=False): + self.execute(index.remove_sql(model, self, concurrently=concurrently)) + + def _delete_index_sql(self, model, name, sql=None, concurrently=False): + sql = ( + self.sql_delete_index_concurrently + if concurrently + else self.sql_delete_index + ) + return super()._delete_index_sql(model, name, sql) + + def _create_index_sql( + self, + model, + *, + fields=None, + name=None, + suffix="", + using="", + db_tablespace=None, + col_suffixes=(), + sql=None, + opclasses=(), + condition=None, + concurrently=False, + include=None, + expressions=None, + ): + sql = sql or ( + self.sql_create_index + if not concurrently + else self.sql_create_index_concurrently + ) + return super()._create_index_sql( + model, + fields=fields, + name=name, + suffix=suffix, + using=using, + db_tablespace=db_tablespace, + col_suffixes=col_suffixes, + sql=sql, + opclasses=opclasses, + condition=condition, + include=include, + expressions=expressions, + ) diff --git a/testbed/django__django/django/db/backends/sqlite3/_functions.py b/testbed/django__django/django/db/backends/sqlite3/_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..7e86950f7df111d6d66b59f9736f52b152dcd612 --- /dev/null +++ b/testbed/django__django/django/db/backends/sqlite3/_functions.py @@ -0,0 +1,511 @@ +""" +Implementations of SQL functions for SQLite. 
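+
+These are registered on each new connection (see register() below) so that
+date/time extraction and truncation, math functions, hashing, and the custom
+aggregates behave the same on SQLite as on the other backends.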
+""" +import functools +import random +import statistics +import zoneinfo +from datetime import timedelta +from hashlib import md5, sha1, sha224, sha256, sha384, sha512 +from math import ( + acos, + asin, + atan, + atan2, + ceil, + cos, + degrees, + exp, + floor, + fmod, + log, + pi, + radians, + sin, + sqrt, + tan, +) +from re import search as re_search + +from django.db.backends.utils import ( + split_tzname_delta, + typecast_time, + typecast_timestamp, +) +from django.utils import timezone +from django.utils.duration import duration_microseconds + + +def register(connection): + create_deterministic_function = functools.partial( + connection.create_function, + deterministic=True, + ) + create_deterministic_function("django_date_extract", 2, _sqlite_datetime_extract) + create_deterministic_function("django_date_trunc", 4, _sqlite_date_trunc) + create_deterministic_function( + "django_datetime_cast_date", 3, _sqlite_datetime_cast_date + ) + create_deterministic_function( + "django_datetime_cast_time", 3, _sqlite_datetime_cast_time + ) + create_deterministic_function( + "django_datetime_extract", 4, _sqlite_datetime_extract + ) + create_deterministic_function("django_datetime_trunc", 4, _sqlite_datetime_trunc) + create_deterministic_function("django_time_extract", 2, _sqlite_time_extract) + create_deterministic_function("django_time_trunc", 4, _sqlite_time_trunc) + create_deterministic_function("django_time_diff", 2, _sqlite_time_diff) + create_deterministic_function("django_timestamp_diff", 2, _sqlite_timestamp_diff) + create_deterministic_function("django_format_dtdelta", 3, _sqlite_format_dtdelta) + create_deterministic_function("regexp", 2, _sqlite_regexp) + create_deterministic_function("BITXOR", 2, _sqlite_bitxor) + create_deterministic_function("COT", 1, _sqlite_cot) + create_deterministic_function("LPAD", 3, _sqlite_lpad) + create_deterministic_function("MD5", 1, _sqlite_md5) + create_deterministic_function("REPEAT", 2, _sqlite_repeat) + create_deterministic_function("REVERSE", 1, _sqlite_reverse) + create_deterministic_function("RPAD", 3, _sqlite_rpad) + create_deterministic_function("SHA1", 1, _sqlite_sha1) + create_deterministic_function("SHA224", 1, _sqlite_sha224) + create_deterministic_function("SHA256", 1, _sqlite_sha256) + create_deterministic_function("SHA384", 1, _sqlite_sha384) + create_deterministic_function("SHA512", 1, _sqlite_sha512) + create_deterministic_function("SIGN", 1, _sqlite_sign) + # Don't use the built-in RANDOM() function because it returns a value + # in the range [-1 * 2^63, 2^63 - 1] instead of [0, 1). + connection.create_function("RAND", 0, random.random) + connection.create_aggregate("STDDEV_POP", 1, StdDevPop) + connection.create_aggregate("STDDEV_SAMP", 1, StdDevSamp) + connection.create_aggregate("VAR_POP", 1, VarPop) + connection.create_aggregate("VAR_SAMP", 1, VarSamp) + # Some math functions are enabled by default in SQLite 3.35+. 
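+    # Illustration of the probe below, run against SQLite directly:
+    #
+    #     sqlite> SELECT sqlite_compileoption_used('ENABLE_MATH_FUNCTIONS');
+    #     1
+    #
+    # The Python fallbacks are only registered when this reports 0.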
+ sql = "select sqlite_compileoption_used('ENABLE_MATH_FUNCTIONS')" + if not connection.execute(sql).fetchone()[0]: + create_deterministic_function("ACOS", 1, _sqlite_acos) + create_deterministic_function("ASIN", 1, _sqlite_asin) + create_deterministic_function("ATAN", 1, _sqlite_atan) + create_deterministic_function("ATAN2", 2, _sqlite_atan2) + create_deterministic_function("CEILING", 1, _sqlite_ceiling) + create_deterministic_function("COS", 1, _sqlite_cos) + create_deterministic_function("DEGREES", 1, _sqlite_degrees) + create_deterministic_function("EXP", 1, _sqlite_exp) + create_deterministic_function("FLOOR", 1, _sqlite_floor) + create_deterministic_function("LN", 1, _sqlite_ln) + create_deterministic_function("LOG", 2, _sqlite_log) + create_deterministic_function("MOD", 2, _sqlite_mod) + create_deterministic_function("PI", 0, _sqlite_pi) + create_deterministic_function("POWER", 2, _sqlite_power) + create_deterministic_function("RADIANS", 1, _sqlite_radians) + create_deterministic_function("SIN", 1, _sqlite_sin) + create_deterministic_function("SQRT", 1, _sqlite_sqrt) + create_deterministic_function("TAN", 1, _sqlite_tan) + + +def _sqlite_datetime_parse(dt, tzname=None, conn_tzname=None): + if dt is None: + return None + try: + dt = typecast_timestamp(dt) + except (TypeError, ValueError): + return None + if conn_tzname: + dt = dt.replace(tzinfo=zoneinfo.ZoneInfo(conn_tzname)) + if tzname is not None and tzname != conn_tzname: + tzname, sign, offset = split_tzname_delta(tzname) + if offset: + hours, minutes = offset.split(":") + offset_delta = timedelta(hours=int(hours), minutes=int(minutes)) + dt += offset_delta if sign == "+" else -offset_delta + dt = timezone.localtime(dt, zoneinfo.ZoneInfo(tzname)) + return dt + + +def _sqlite_date_trunc(lookup_type, dt, tzname, conn_tzname): + dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt is None: + return None + if lookup_type == "year": + return f"{dt.year:04d}-01-01" + elif lookup_type == "quarter": + month_in_quarter = dt.month - (dt.month - 1) % 3 + return f"{dt.year:04d}-{month_in_quarter:02d}-01" + elif lookup_type == "month": + return f"{dt.year:04d}-{dt.month:02d}-01" + elif lookup_type == "week": + dt -= timedelta(days=dt.weekday()) + return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d}" + elif lookup_type == "day": + return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d}" + raise ValueError(f"Unsupported lookup type: {lookup_type!r}") + + +def _sqlite_time_trunc(lookup_type, dt, tzname, conn_tzname): + if dt is None: + return None + dt_parsed = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt_parsed is None: + try: + dt = typecast_time(dt) + except (ValueError, TypeError): + return None + else: + dt = dt_parsed + if lookup_type == "hour": + return f"{dt.hour:02d}:00:00" + elif lookup_type == "minute": + return f"{dt.hour:02d}:{dt.minute:02d}:00" + elif lookup_type == "second": + return f"{dt.hour:02d}:{dt.minute:02d}:{dt.second:02d}" + raise ValueError(f"Unsupported lookup type: {lookup_type!r}") + + +def _sqlite_datetime_cast_date(dt, tzname, conn_tzname): + dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt is None: + return None + return dt.date().isoformat() + + +def _sqlite_datetime_cast_time(dt, tzname, conn_tzname): + dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt is None: + return None + return dt.time().isoformat() + + +def _sqlite_datetime_extract(lookup_type, dt, tzname=None, conn_tzname=None): + dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt is None: + return None + if 
lookup_type == "week_day": + return (dt.isoweekday() % 7) + 1 + elif lookup_type == "iso_week_day": + return dt.isoweekday() + elif lookup_type == "week": + return dt.isocalendar().week + elif lookup_type == "quarter": + return ceil(dt.month / 3) + elif lookup_type == "iso_year": + return dt.isocalendar().year + else: + return getattr(dt, lookup_type) + + +def _sqlite_datetime_trunc(lookup_type, dt, tzname, conn_tzname): + dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) + if dt is None: + return None + if lookup_type == "year": + return f"{dt.year:04d}-01-01 00:00:00" + elif lookup_type == "quarter": + month_in_quarter = dt.month - (dt.month - 1) % 3 + return f"{dt.year:04d}-{month_in_quarter:02d}-01 00:00:00" + elif lookup_type == "month": + return f"{dt.year:04d}-{dt.month:02d}-01 00:00:00" + elif lookup_type == "week": + dt -= timedelta(days=dt.weekday()) + return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d} 00:00:00" + elif lookup_type == "day": + return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d} 00:00:00" + elif lookup_type == "hour": + return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d} {dt.hour:02d}:00:00" + elif lookup_type == "minute": + return ( + f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d} " + f"{dt.hour:02d}:{dt.minute:02d}:00" + ) + elif lookup_type == "second": + return ( + f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d} " + f"{dt.hour:02d}:{dt.minute:02d}:{dt.second:02d}" + ) + raise ValueError(f"Unsupported lookup type: {lookup_type!r}") + + +def _sqlite_time_extract(lookup_type, dt): + if dt is None: + return None + try: + dt = typecast_time(dt) + except (ValueError, TypeError): + return None + return getattr(dt, lookup_type) + + +def _sqlite_prepare_dtdelta_param(conn, param): + if conn in ["+", "-"]: + if isinstance(param, int): + return timedelta(0, 0, param) + else: + return typecast_timestamp(param) + return param + + +def _sqlite_format_dtdelta(connector, lhs, rhs): + """ + LHS and RHS can be either: + - An integer number of microseconds + - A string representing a datetime + - A scalar value, e.g. float + """ + if connector is None or lhs is None or rhs is None: + return None + connector = connector.strip() + try: + real_lhs = _sqlite_prepare_dtdelta_param(connector, lhs) + real_rhs = _sqlite_prepare_dtdelta_param(connector, rhs) + except (ValueError, TypeError): + return None + if connector == "+": + # typecast_timestamp() returns a date or a datetime without timezone. 
+ # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]" + out = str(real_lhs + real_rhs) + elif connector == "-": + out = str(real_lhs - real_rhs) + elif connector == "*": + out = real_lhs * real_rhs + else: + out = real_lhs / real_rhs + return out + + +def _sqlite_time_diff(lhs, rhs): + if lhs is None or rhs is None: + return None + left = typecast_time(lhs) + right = typecast_time(rhs) + return ( + (left.hour * 60 * 60 * 1000000) + + (left.minute * 60 * 1000000) + + (left.second * 1000000) + + (left.microsecond) + - (right.hour * 60 * 60 * 1000000) + - (right.minute * 60 * 1000000) + - (right.second * 1000000) + - (right.microsecond) + ) + + +def _sqlite_timestamp_diff(lhs, rhs): + if lhs is None or rhs is None: + return None + left = typecast_timestamp(lhs) + right = typecast_timestamp(rhs) + return duration_microseconds(left - right) + + +def _sqlite_regexp(pattern, string): + if pattern is None or string is None: + return None + if not isinstance(string, str): + string = str(string) + return bool(re_search(pattern, string)) + + +def _sqlite_acos(x): + if x is None: + return None + return acos(x) + + +def _sqlite_asin(x): + if x is None: + return None + return asin(x) + + +def _sqlite_atan(x): + if x is None: + return None + return atan(x) + + +def _sqlite_atan2(y, x): + if y is None or x is None: + return None + return atan2(y, x) + + +def _sqlite_bitxor(x, y): + if x is None or y is None: + return None + return x ^ y + + +def _sqlite_ceiling(x): + if x is None: + return None + return ceil(x) + + +def _sqlite_cos(x): + if x is None: + return None + return cos(x) + + +def _sqlite_cot(x): + if x is None: + return None + return 1 / tan(x) + + +def _sqlite_degrees(x): + if x is None: + return None + return degrees(x) + + +def _sqlite_exp(x): + if x is None: + return None + return exp(x) + + +def _sqlite_floor(x): + if x is None: + return None + return floor(x) + + +def _sqlite_ln(x): + if x is None: + return None + return log(x) + + +def _sqlite_log(base, x): + if base is None or x is None: + return None + # Arguments reversed to match SQL standard. 
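+    # e.g. SQL LOG(10, 100) becomes log(100, 10) == 2.0.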
+ return log(x, base) + + +def _sqlite_lpad(text, length, fill_text): + if text is None or length is None or fill_text is None: + return None + delta = length - len(text) + if delta <= 0: + return text[:length] + return (fill_text * length)[:delta] + text + + +def _sqlite_md5(text): + if text is None: + return None + return md5(text.encode()).hexdigest() + + +def _sqlite_mod(x, y): + if x is None or y is None: + return None + return fmod(x, y) + + +def _sqlite_pi(): + return pi + + +def _sqlite_power(x, y): + if x is None or y is None: + return None + return x**y + + +def _sqlite_radians(x): + if x is None: + return None + return radians(x) + + +def _sqlite_repeat(text, count): + if text is None or count is None: + return None + return text * count + + +def _sqlite_reverse(text): + if text is None: + return None + return text[::-1] + + +def _sqlite_rpad(text, length, fill_text): + if text is None or length is None or fill_text is None: + return None + return (text + fill_text * length)[:length] + + +def _sqlite_sha1(text): + if text is None: + return None + return sha1(text.encode()).hexdigest() + + +def _sqlite_sha224(text): + if text is None: + return None + return sha224(text.encode()).hexdigest() + + +def _sqlite_sha256(text): + if text is None: + return None + return sha256(text.encode()).hexdigest() + + +def _sqlite_sha384(text): + if text is None: + return None + return sha384(text.encode()).hexdigest() + + +def _sqlite_sha512(text): + if text is None: + return None + return sha512(text.encode()).hexdigest() + + +def _sqlite_sign(x): + if x is None: + return None + return (x > 0) - (x < 0) + + +def _sqlite_sin(x): + if x is None: + return None + return sin(x) + + +def _sqlite_sqrt(x): + if x is None: + return None + return sqrt(x) + + +def _sqlite_tan(x): + if x is None: + return None + return tan(x) + + +class ListAggregate(list): + step = list.append + + +class StdDevPop(ListAggregate): + finalize = statistics.pstdev + + +class StdDevSamp(ListAggregate): + finalize = statistics.stdev + + +class VarPop(ListAggregate): + finalize = statistics.pvariance + + +class VarSamp(ListAggregate): + finalize = statistics.variance diff --git a/testbed/django__django/django/db/backends/sqlite3/base.py b/testbed/django__django/django/db/backends/sqlite3/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a3a382a56bdc8a1c08e7ca626e18106b62cc1e37 --- /dev/null +++ b/testbed/django__django/django/db/backends/sqlite3/base.py @@ -0,0 +1,347 @@ +""" +SQLite backend for the sqlite3 module in the standard library. +""" +import datetime +import decimal +import warnings +from collections.abc import Mapping +from itertools import chain, tee +from sqlite3 import dbapi2 as Database + +from django.core.exceptions import ImproperlyConfigured +from django.db import IntegrityError +from django.db.backends.base.base import BaseDatabaseWrapper +from django.utils.asyncio import async_unsafe +from django.utils.dateparse import parse_date, parse_datetime, parse_time +from django.utils.regex_helper import _lazy_re_compile + +from ._functions import register as register_functions +from .client import DatabaseClient +from .creation import DatabaseCreation +from .features import DatabaseFeatures +from .introspection import DatabaseIntrospection +from .operations import DatabaseOperations +from .schema import DatabaseSchemaEditor + + +def decoder(conv_func): + """ + Convert bytestrings from Python's sqlite3 interface to a regular string. 
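+
+    For example, decoder(parse_date) maps b"2024-01-31" to
+    datetime.date(2024, 1, 31).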
+ """ + return lambda s: conv_func(s.decode()) + + +def adapt_date(val): + return val.isoformat() + + +def adapt_datetime(val): + return val.isoformat(" ") + + +Database.register_converter("bool", b"1".__eq__) +Database.register_converter("date", decoder(parse_date)) +Database.register_converter("time", decoder(parse_time)) +Database.register_converter("datetime", decoder(parse_datetime)) +Database.register_converter("timestamp", decoder(parse_datetime)) + +Database.register_adapter(decimal.Decimal, str) +Database.register_adapter(datetime.date, adapt_date) +Database.register_adapter(datetime.datetime, adapt_datetime) + + +class DatabaseWrapper(BaseDatabaseWrapper): + vendor = "sqlite" + display_name = "SQLite" + # SQLite doesn't actually support most of these types, but it "does the right + # thing" given more verbose field definitions, so leave them as is so that + # schema inspection is more useful. + data_types = { + "AutoField": "integer", + "BigAutoField": "integer", + "BinaryField": "BLOB", + "BooleanField": "bool", + "CharField": "varchar(%(max_length)s)", + "DateField": "date", + "DateTimeField": "datetime", + "DecimalField": "decimal", + "DurationField": "bigint", + "FileField": "varchar(%(max_length)s)", + "FilePathField": "varchar(%(max_length)s)", + "FloatField": "real", + "IntegerField": "integer", + "BigIntegerField": "bigint", + "IPAddressField": "char(15)", + "GenericIPAddressField": "char(39)", + "JSONField": "text", + "OneToOneField": "integer", + "PositiveBigIntegerField": "bigint unsigned", + "PositiveIntegerField": "integer unsigned", + "PositiveSmallIntegerField": "smallint unsigned", + "SlugField": "varchar(%(max_length)s)", + "SmallAutoField": "integer", + "SmallIntegerField": "smallint", + "TextField": "text", + "TimeField": "time", + "UUIDField": "char(32)", + } + data_type_check_constraints = { + "PositiveBigIntegerField": '"%(column)s" >= 0', + "JSONField": '(JSON_VALID("%(column)s") OR "%(column)s" IS NULL)', + "PositiveIntegerField": '"%(column)s" >= 0', + "PositiveSmallIntegerField": '"%(column)s" >= 0', + } + data_types_suffix = { + "AutoField": "AUTOINCREMENT", + "BigAutoField": "AUTOINCREMENT", + "SmallAutoField": "AUTOINCREMENT", + } + # SQLite requires LIKE statements to include an ESCAPE clause if the value + # being escaped has a percent or underscore in it. + # See https://www.sqlite.org/lang_expr.html for an explanation. + operators = { + "exact": "= %s", + "iexact": "LIKE %s ESCAPE '\\'", + "contains": "LIKE %s ESCAPE '\\'", + "icontains": "LIKE %s ESCAPE '\\'", + "regex": "REGEXP %s", + "iregex": "REGEXP '(?i)' || %s", + "gt": "> %s", + "gte": ">= %s", + "lt": "< %s", + "lte": "<= %s", + "startswith": "LIKE %s ESCAPE '\\'", + "endswith": "LIKE %s ESCAPE '\\'", + "istartswith": "LIKE %s ESCAPE '\\'", + "iendswith": "LIKE %s ESCAPE '\\'", + } + + # The patterns below are used to generate SQL pattern lookup clauses when + # the right-hand side of the lookup isn't a raw string (it might be an expression + # or the result of a bilateral transformation). + # In those cases, special characters for LIKE operators (e.g. \, *, _) should be + # escaped on database side. + # + # Note: we use str.format() here for readability as '%' is used as a wildcard for + # the LIKE operator. 
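+    # Illustrative rendering (hypothetical lookup): name__contains=F("part")
+    # escapes the right-hand side with pattern_esc and compiles to roughly
+    #
+    #     ... WHERE "name" LIKE '%' || <escaped "part"> || '%' ESCAPE '\'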
+ pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')" + pattern_ops = { + "contains": r"LIKE '%%' || {} || '%%' ESCAPE '\'", + "icontains": r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'", + "startswith": r"LIKE {} || '%%' ESCAPE '\'", + "istartswith": r"LIKE UPPER({}) || '%%' ESCAPE '\'", + "endswith": r"LIKE '%%' || {} ESCAPE '\'", + "iendswith": r"LIKE '%%' || UPPER({}) ESCAPE '\'", + } + + Database = Database + SchemaEditorClass = DatabaseSchemaEditor + # Classes instantiated in __init__(). + client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + + def get_connection_params(self): + settings_dict = self.settings_dict + if not settings_dict["NAME"]: + raise ImproperlyConfigured( + "settings.DATABASES is improperly configured. " + "Please supply the NAME value." + ) + kwargs = { + "database": settings_dict["NAME"], + "detect_types": Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, + **settings_dict["OPTIONS"], + } + # Always allow the underlying SQLite connection to be shareable + # between multiple threads. The safe-guarding will be handled at a + # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` + # property. This is necessary as the shareability is disabled by + # default in sqlite3 and it cannot be changed once a connection is + # opened. + if "check_same_thread" in kwargs and kwargs["check_same_thread"]: + warnings.warn( + "The `check_same_thread` option was provided and set to " + "True. It will be overridden with False. Use the " + "`DatabaseWrapper.allow_thread_sharing` property instead " + "for controlling thread shareability.", + RuntimeWarning, + ) + kwargs.update({"check_same_thread": False, "uri": True}) + return kwargs + + def get_database_version(self): + return self.Database.sqlite_version_info + + @async_unsafe + def get_new_connection(self, conn_params): + conn = Database.connect(**conn_params) + register_functions(conn) + + conn.execute("PRAGMA foreign_keys = ON") + # The macOS bundled SQLite defaults legacy_alter_table ON, which + # prevents atomic table renames (feature supports_atomic_references_rename) + conn.execute("PRAGMA legacy_alter_table = OFF") + return conn + + def create_cursor(self, name=None): + return self.connection.cursor(factory=SQLiteCursorWrapper) + + @async_unsafe + def close(self): + self.validate_thread_sharing() + # If database is in memory, closing the connection destroys the + # database. To prevent accidental data loss, ignore close requests on + # an in-memory db. + if not self.is_in_memory_db(): + BaseDatabaseWrapper.close(self) + + def _savepoint_allowed(self): + # When 'isolation_level' is not None, sqlite3 commits before each + # savepoint; it's a bug. When it is None, savepoints don't make sense + # because autocommit is enabled. The only exception is inside 'atomic' + # blocks. To work around that bug, on SQLite, 'atomic' starts a + # transaction explicitly rather than simply disable autocommit. + return self.in_atomic_block + + def _set_autocommit(self, autocommit): + if autocommit: + level = None + else: + # sqlite3's internal default is ''. It's different from None. + # See Modules/_sqlite/connection.c. + level = "" + # 'isolation_level' is a misleading API. + # SQLite always runs at the SERIALIZABLE isolation level. 
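+        # With isolation_level = None, the sqlite3 module stays in autocommit
+        # mode; with "", it resumes opening implicit transactions before
+        # data-modifying statements.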
+ with self.wrap_database_errors:
+ self.connection.isolation_level = level
+
+ def disable_constraint_checking(self):
+ with self.cursor() as cursor:
+ cursor.execute("PRAGMA foreign_keys = OFF")
+ # Foreign key constraints cannot be turned off while in a multi-
+ # statement transaction. Fetch the current state of the pragma
+ # to determine if constraints are effectively disabled.
+ enabled = cursor.execute("PRAGMA foreign_keys").fetchone()[0]
+ return not bool(enabled)
+
+ def enable_constraint_checking(self):
+ with self.cursor() as cursor:
+ cursor.execute("PRAGMA foreign_keys = ON")
+
+ def check_constraints(self, table_names=None):
+ """
+ Check each table name in `table_names` for rows with invalid foreign
+ key references. This method is intended to be used in conjunction with
+ `disable_constraint_checking()` and `enable_constraint_checking()`, to
+ determine if rows with invalid references were entered while constraint
+ checks were off.
+ """
+ with self.cursor() as cursor:
+ if table_names is None:
+ violations = cursor.execute("PRAGMA foreign_key_check").fetchall()
+ else:
+ violations = chain.from_iterable(
+ cursor.execute(
+ "PRAGMA foreign_key_check(%s)" % self.ops.quote_name(table_name)
+ ).fetchall()
+ for table_name in table_names
+ )
+ # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check
+ for (
+ table_name,
+ rowid,
+ referenced_table_name,
+ foreign_key_index,
+ ) in violations:
+ foreign_key = cursor.execute(
+ "PRAGMA foreign_key_list(%s)" % self.ops.quote_name(table_name)
+ ).fetchall()[foreign_key_index]
+ column_name, referenced_column_name = foreign_key[3:5]
+ primary_key_column_name = self.introspection.get_primary_key_column(
+ cursor, table_name
+ )
+ primary_key_value, bad_value = cursor.execute(
+ "SELECT %s, %s FROM %s WHERE rowid = %%s"
+ % (
+ self.ops.quote_name(primary_key_column_name),
+ self.ops.quote_name(column_name),
+ self.ops.quote_name(table_name),
+ ),
+ (rowid,),
+ ).fetchone()
+ raise IntegrityError(
+ "The row in table '%s' with primary key '%s' has an "
+ "invalid foreign key: %s.%s contains a value '%s' that "
+ "does not have a corresponding value in %s.%s."
+ % (
+ table_name,
+ primary_key_value,
+ table_name,
+ column_name,
+ bad_value,
+ referenced_table_name,
+ referenced_column_name,
+ )
+ )
+
+ def is_usable(self):
+ return True
+
+ def _start_transaction_under_autocommit(self):
+ """
+ Start a transaction explicitly in autocommit mode.
+
+ Staying in autocommit mode works around a bug of sqlite3 that breaks
+ savepoints when autocommit is disabled.
+ """
+ self.cursor().execute("BEGIN")
+
+ def is_in_memory_db(self):
+ return self.creation.is_in_memory_db(self.settings_dict["NAME"])
+
+
+FORMAT_QMARK_REGEX = _lazy_re_compile(r"(?<!%)%s")
+
+
+class SQLiteCursorWrapper(Database.Cursor):
+ """
+ Django uses the "format" and "pyformat" styles, but Python's sqlite3
+ module supports neither of these styles.
+
+ This wrapper performs the following conversions:
+
+ - "format" style to "qmark" style
+ - "pyformat" style to "named" style
+
+ In both cases, if you want to use a literal "%s", you'll need to use "%%s".
+ """
+
+ def execute(self, query, params=None):
+ if params is None:
+ return super().execute(query)
+ # Extract names if params is a mapping, i.e. "pyformat" style.
+ param_names = list(params) if isinstance(params, Mapping) else None
+ query = self.convert_query(query, param_names=param_names)
+ return super().execute(query, params)
+
+ def executemany(self, query, param_list):
+ # Extract names if params is a mapping, i.e. "pyformat" style.
+ peekable, param_list = tee(iter(param_list))
+ if (params := next(peekable, None)) and isinstance(params, Mapping):
+ param_names = list(params)
+ else:
+ param_names = None
+ query = self.convert_query(query, param_names=param_names)
+ return super().executemany(query, param_list)
+
+ def convert_query(self, query, *, param_names=None):
+ if param_names is None:
+ # Convert from "format" style to "qmark" style.
+ return FORMAT_QMARK_REGEX.sub("?", query).replace("%%", "%")
+ else:
+ # Convert from "pyformat" style to "named" style.
+ return query % {name: f":{name}" for name in param_names}
diff --git a/testbed/django__django/django/db/backends/sqlite3/creation.py b/testbed/django__django/django/db/backends/sqlite3/creation.py
new file mode 100644
--- /dev/null
+++ b/testbed/django__django/django/db/backends/sqlite3/creation.py
+import multiprocessing
+import os
+import shutil
+import sqlite3
+import sys
+
+from django.db import NotSupportedError
+from django.db.backends.base.creation import BaseDatabaseCreation
+
+
+class DatabaseCreation(BaseDatabaseCreation):
+ @staticmethod
+ def is_in_memory_db(database_name):
+ return not isinstance(database_name, str) or database_name == ":memory:"
+
+ def _get_test_db_name(self):
+ test_database_name = self.connection.settings_dict["TEST"]["NAME"] or ":memory:"
+ if test_database_name == ":memory:":
+ return "file:memorydb_%s?mode=memory&cache=shared" % self.connection.alias
+ return test_database_name
+
+ def _create_test_db(self, verbosity, autoclobber, keepdb=False):
+ test_database_name = self._get_test_db_name()
+
+ if keepdb:
+ return test_database_name
+ if not self.is_in_memory_db(test_database_name):
+ # Erase the old test database
+ if verbosity >= 1:
+ self.log(
+ "Destroying old test database for alias %s..."
+ % (self._get_database_display_str(verbosity, test_database_name),) + ) + if os.access(test_database_name, os.F_OK): + if not autoclobber: + confirm = input( + "Type 'yes' if you would like to try deleting the test " + "database '%s', or 'no' to cancel: " % test_database_name + ) + if autoclobber or confirm == "yes": + try: + os.remove(test_database_name) + except Exception as e: + self.log("Got an error deleting the old test database: %s" % e) + sys.exit(2) + else: + self.log("Tests cancelled.") + sys.exit(1) + return test_database_name + + def get_test_db_clone_settings(self, suffix): + orig_settings_dict = self.connection.settings_dict + source_database_name = orig_settings_dict["NAME"] + + if not self.is_in_memory_db(source_database_name): + root, ext = os.path.splitext(source_database_name) + return {**orig_settings_dict, "NAME": f"{root}_{suffix}{ext}"} + + start_method = multiprocessing.get_start_method() + if start_method == "fork": + return orig_settings_dict + if start_method == "spawn": + return { + **orig_settings_dict, + "NAME": f"{self.connection.alias}_{suffix}.sqlite3", + } + raise NotSupportedError( + f"Cloning with start method {start_method!r} is not supported." + ) + + def _clone_test_db(self, suffix, verbosity, keepdb=False): + source_database_name = self.connection.settings_dict["NAME"] + target_database_name = self.get_test_db_clone_settings(suffix)["NAME"] + if not self.is_in_memory_db(source_database_name): + # Erase the old test database + if os.access(target_database_name, os.F_OK): + if keepdb: + return + if verbosity >= 1: + self.log( + "Destroying old test database for alias %s..." + % ( + self._get_database_display_str( + verbosity, target_database_name + ), + ) + ) + try: + os.remove(target_database_name) + except Exception as e: + self.log("Got an error deleting the old test database: %s" % e) + sys.exit(2) + try: + shutil.copy(source_database_name, target_database_name) + except Exception as e: + self.log("Got an error cloning the test database: %s" % e) + sys.exit(2) + # Forking automatically makes a copy of an in-memory database. + # Spawn requires migrating to disk which will be re-opened in + # setup_worker_connection. + elif multiprocessing.get_start_method() == "spawn": + ondisk_db = sqlite3.connect(target_database_name, uri=True) + self.connection.connection.backup(ondisk_db) + ondisk_db.close() + + def _destroy_test_db(self, test_database_name, verbosity): + if test_database_name and not self.is_in_memory_db(test_database_name): + # Remove the SQLite database file + os.remove(test_database_name) + + def test_db_signature(self): + """ + Return a tuple that uniquely identifies a test database. + + This takes into account the special cases of ":memory:" and "" for + SQLite since the databases will be distinct despite having the same + TEST NAME. See https://www.sqlite.org/inmemorydb.html + """ + test_database_name = self._get_test_db_name() + sig = [self.connection.settings_dict["NAME"]] + if self.is_in_memory_db(test_database_name): + sig.append(self.connection.alias) + else: + sig.append(test_database_name) + return tuple(sig) + + def setup_worker_connection(self, _worker_id): + settings_dict = self.get_test_db_clone_settings(_worker_id) + # connection.settings_dict must be updated in place for changes to be + # reflected in django.db.connections. Otherwise new threads would + # connect to the default database instead of the appropriate clone. 
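+ # Illustrative note (not part of the original file): for worker 2 of
+ # alias "default", the "fork" branch below keeps the cloned NAME from
+ # get_test_db_clone_settings(), while the "spawn" branch ends up with
+ # NAME "file:memorydb_default_2?mode=memory&cache=shared".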
+ start_method = multiprocessing.get_start_method()
+ if start_method == "fork":
+ # Update settings_dict in place.
+ self.connection.settings_dict.update(settings_dict)
+ self.connection.close()
+ elif start_method == "spawn":
+ alias = self.connection.alias
+ connection_str = (
+ f"file:memorydb_{alias}_{_worker_id}?mode=memory&cache=shared"
+ )
+ source_db = self.connection.Database.connect(
+ f"file:{alias}_{_worker_id}.sqlite3", uri=True
+ )
+ target_db = sqlite3.connect(connection_str, uri=True)
+ source_db.backup(target_db)
+ source_db.close()
+ # Update settings_dict in place.
+ self.connection.settings_dict.update(settings_dict)
+ self.connection.settings_dict["NAME"] = connection_str
+ # Re-open connection to in-memory database before closing copy
+ # connection.
+ self.connection.connect()
+ target_db.close()
+ if os.environ.get("RUNNING_DJANGOS_TEST_SUITE") == "true":
+ self.mark_expected_failures_and_skips()
diff --git a/testbed/django__django/django/db/backends/sqlite3/operations.py b/testbed/django__django/django/db/backends/sqlite3/operations.py
new file mode 100644
index 0000000000000000000000000000000000000000..85ad8043487bb25d009c2bbeed338ae1639e6795
--- /dev/null
+++ b/testbed/django__django/django/db/backends/sqlite3/operations.py
@@ -0,0 +1,441 @@
+import datetime
+import decimal
+import uuid
+from functools import lru_cache
+from itertools import chain
+
+from django.conf import settings
+from django.core.exceptions import FieldError
+from django.db import DatabaseError, NotSupportedError, models
+from django.db.backends.base.operations import BaseDatabaseOperations
+from django.db.models.constants import OnConflict
+from django.db.models.expressions import Col
+from django.utils import timezone
+from django.utils.dateparse import parse_date, parse_datetime, parse_time
+from django.utils.functional import cached_property
+
+
+class DatabaseOperations(BaseDatabaseOperations):
+ cast_char_field_without_max_length = "text"
+ cast_data_types = {
+ "DateField": "TEXT",
+ "DateTimeField": "TEXT",
+ }
+ explain_prefix = "EXPLAIN QUERY PLAN"
+ # List of datatypes that cannot be extracted with JSON_EXTRACT() on
+ # SQLite. Use JSON_TYPE() instead.
+ jsonfield_datatype_values = frozenset(["null", "false", "true"])
+
+ def bulk_batch_size(self, fields, objs):
+ """
+ SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
+ 999 variables per query.
+
+ If there's only a single field to insert, the limit is 500
+ (SQLITE_MAX_COMPOUND_SELECT).
+ """
+ if len(fields) == 1:
+ return 500
+ elif len(fields) > 1:
+ return self.connection.features.max_query_params // len(fields)
+ else:
+ return len(objs)
+
+ def check_expression_support(self, expression):
+ bad_fields = (models.DateField, models.DateTimeField, models.TimeField)
+ bad_aggregates = (models.Sum, models.Avg, models.Variance, models.StdDev)
+ if isinstance(expression, bad_aggregates):
+ for expr in expression.get_source_expressions():
+ try:
+ output_field = expr.output_field
+ except (AttributeError, FieldError):
+ # Not every subexpression has an output_field which is fine
+ # to ignore.
+ pass
+ else:
+ if isinstance(output_field, bad_fields):
+ raise NotSupportedError(
+ "You cannot use Sum, Avg, StdDev, and Variance "
+ "aggregations on date/time fields in sqlite3 "
+ "since date/time is saved as text."
+ ) + if ( + isinstance(expression, models.Aggregate) + and expression.distinct + and len(expression.source_expressions) > 1 + ): + raise NotSupportedError( + "SQLite doesn't support DISTINCT on aggregate functions " + "accepting multiple arguments." + ) + + def date_extract_sql(self, lookup_type, sql, params): + """ + Support EXTRACT with a user-defined function django_date_extract() + that's registered in connect(). Use single quotes because this is a + string and could otherwise cause a collision with a field name. + """ + return f"django_date_extract(%s, {sql})", (lookup_type.lower(), *params) + + def fetch_returned_insert_rows(self, cursor): + """ + Given a cursor object that has just performed an INSERT...RETURNING + statement into a table, return the list of returned data. + """ + return cursor.fetchall() + + def format_for_duration_arithmetic(self, sql): + """Do nothing since formatting is handled in the custom function.""" + return sql + + def date_trunc_sql(self, lookup_type, sql, params, tzname=None): + return f"django_date_trunc(%s, {sql}, %s, %s)", ( + lookup_type.lower(), + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def time_trunc_sql(self, lookup_type, sql, params, tzname=None): + return f"django_time_trunc(%s, {sql}, %s, %s)", ( + lookup_type.lower(), + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def _convert_tznames_to_sql(self, tzname): + if tzname and settings.USE_TZ: + return tzname, self.connection.timezone_name + return None, None + + def datetime_cast_date_sql(self, sql, params, tzname): + return f"django_datetime_cast_date({sql}, %s, %s)", ( + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def datetime_cast_time_sql(self, sql, params, tzname): + return f"django_datetime_cast_time({sql}, %s, %s)", ( + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def datetime_extract_sql(self, lookup_type, sql, params, tzname): + return f"django_datetime_extract(%s, {sql}, %s, %s)", ( + lookup_type.lower(), + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def datetime_trunc_sql(self, lookup_type, sql, params, tzname): + return f"django_datetime_trunc(%s, {sql}, %s, %s)", ( + lookup_type.lower(), + *params, + *self._convert_tznames_to_sql(tzname), + ) + + def time_extract_sql(self, lookup_type, sql, params): + return f"django_time_extract(%s, {sql})", (lookup_type.lower(), *params) + + def pk_default_value(self): + return "NULL" + + def _quote_params_for_last_executed_query(self, params): + """ + Only for last_executed_query! Don't use this to execute SQL queries! + """ + # This function is limited both by SQLITE_LIMIT_VARIABLE_NUMBER (the + # number of parameters, default = 999) and SQLITE_MAX_COLUMN (the + # number of return values, default = 2000). Since Python's sqlite3 + # module doesn't expose the get_limit() C API, assume the default + # limits are in effect and split the work in batches if needed. + BATCH_SIZE = 999 + if len(params) > BATCH_SIZE: + results = () + for index in range(0, len(params), BATCH_SIZE): + chunk = params[index : index + BATCH_SIZE] + results += self._quote_params_for_last_executed_query(chunk) + return results + + sql = "SELECT " + ", ".join(["QUOTE(?)"] * len(params)) + # Bypass Django's wrappers and use the underlying sqlite3 connection + # to avoid logging this query - it would trigger infinite recursion. + cursor = self.connection.connection.cursor() + # Native sqlite3 cursors cannot be used as context managers. 
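+ # Hence the explicit try/finally below instead of a "with" block.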
+ try: + return cursor.execute(sql, params).fetchone() + finally: + cursor.close() + + def last_executed_query(self, cursor, sql, params): + # Python substitutes parameters in Modules/_sqlite/cursor.c with: + # bind_parameters(state, self->statement, parameters); + # Unfortunately there is no way to reach self->statement from Python, + # so we quote and substitute parameters manually. + if params: + if isinstance(params, (list, tuple)): + params = self._quote_params_for_last_executed_query(params) + else: + values = tuple(params.values()) + values = self._quote_params_for_last_executed_query(values) + params = dict(zip(params, values)) + return sql % params + # For consistency with SQLiteCursorWrapper.execute(), just return sql + # when there are no parameters. See #13648 and #17158. + else: + return sql + + def quote_name(self, name): + if name.startswith('"') and name.endswith('"'): + return name # Quoting once is enough. + return '"%s"' % name + + def no_limit_value(self): + return -1 + + def __references_graph(self, table_name): + query = """ + WITH tables AS ( + SELECT %s name + UNION + SELECT sqlite_master.name + FROM sqlite_master + JOIN tables ON (sql REGEXP %s || tables.name || %s) + ) SELECT name FROM tables; + """ + params = ( + table_name, + r'(?i)\s+references\s+("|\')?', + r'("|\')?\s*\(', + ) + with self.connection.cursor() as cursor: + results = cursor.execute(query, params) + return [row[0] for row in results.fetchall()] + + @cached_property + def _references_graph(self): + # 512 is large enough to fit the ~330 tables (as of this writing) in + # Django's test suite. + return lru_cache(maxsize=512)(self.__references_graph) + + def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): + if tables and allow_cascade: + # Simulate TRUNCATE CASCADE by recursively collecting the tables + # referencing the tables to be flushed. + tables = set( + chain.from_iterable(self._references_graph(table) for table in tables) + ) + sql = [ + "%s %s %s;" + % ( + style.SQL_KEYWORD("DELETE"), + style.SQL_KEYWORD("FROM"), + style.SQL_FIELD(self.quote_name(table)), + ) + for table in tables + ] + if reset_sequences: + sequences = [{"table": table} for table in tables] + sql.extend(self.sequence_reset_by_name_sql(style, sequences)) + return sql + + def sequence_reset_by_name_sql(self, style, sequences): + if not sequences: + return [] + return [ + "%s %s %s %s = 0 %s %s %s (%s);" + % ( + style.SQL_KEYWORD("UPDATE"), + style.SQL_TABLE(self.quote_name("sqlite_sequence")), + style.SQL_KEYWORD("SET"), + style.SQL_FIELD(self.quote_name("seq")), + style.SQL_KEYWORD("WHERE"), + style.SQL_FIELD(self.quote_name("name")), + style.SQL_KEYWORD("IN"), + ", ".join( + ["'%s'" % sequence_info["table"] for sequence_info in sequences] + ), + ), + ] + + def adapt_datetimefield_value(self, value): + if value is None: + return None + + # Expression values are adapted by the database. + if hasattr(value, "resolve_expression"): + return value + + # SQLite doesn't support tz-aware datetimes + if timezone.is_aware(value): + if settings.USE_TZ: + value = timezone.make_naive(value, self.connection.timezone) + else: + raise ValueError( + "SQLite backend does not support timezone-aware datetimes when " + "USE_TZ is False." + ) + + return str(value) + + def adapt_timefield_value(self, value): + if value is None: + return None + + # Expression values are adapted by the database. 
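+ # For example, F("opens_at") passes through unchanged; only concrete
+ # datetime.time values reach the str() call below (illustrative note,
+ # not part of the original file).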
+ if hasattr(value, "resolve_expression"): + return value + + # SQLite doesn't support tz-aware datetimes + if timezone.is_aware(value): + raise ValueError("SQLite backend does not support timezone-aware times.") + + return str(value) + + def get_db_converters(self, expression): + converters = super().get_db_converters(expression) + internal_type = expression.output_field.get_internal_type() + if internal_type == "DateTimeField": + converters.append(self.convert_datetimefield_value) + elif internal_type == "DateField": + converters.append(self.convert_datefield_value) + elif internal_type == "TimeField": + converters.append(self.convert_timefield_value) + elif internal_type == "DecimalField": + converters.append(self.get_decimalfield_converter(expression)) + elif internal_type == "UUIDField": + converters.append(self.convert_uuidfield_value) + elif internal_type == "BooleanField": + converters.append(self.convert_booleanfield_value) + return converters + + def convert_datetimefield_value(self, value, expression, connection): + if value is not None: + if not isinstance(value, datetime.datetime): + value = parse_datetime(value) + if settings.USE_TZ and not timezone.is_aware(value): + value = timezone.make_aware(value, self.connection.timezone) + return value + + def convert_datefield_value(self, value, expression, connection): + if value is not None: + if not isinstance(value, datetime.date): + value = parse_date(value) + return value + + def convert_timefield_value(self, value, expression, connection): + if value is not None: + if not isinstance(value, datetime.time): + value = parse_time(value) + return value + + def get_decimalfield_converter(self, expression): + # SQLite stores only 15 significant digits. Digits coming from + # float inaccuracy must be removed. + create_decimal = decimal.Context(prec=15).create_decimal_from_float + if isinstance(expression, Col): + quantize_value = decimal.Decimal(1).scaleb( + -expression.output_field.decimal_places + ) + + def converter(value, expression, connection): + if value is not None: + return create_decimal(value).quantize( + quantize_value, context=expression.output_field.context + ) + + else: + + def converter(value, expression, connection): + if value is not None: + return create_decimal(value) + + return converter + + def convert_uuidfield_value(self, value, expression, connection): + if value is not None: + value = uuid.UUID(value) + return value + + def convert_booleanfield_value(self, value, expression, connection): + return bool(value) if value in (1, 0) else value + + def bulk_insert_sql(self, fields, placeholder_rows): + placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) + values_sql = ", ".join(f"({sql})" for sql in placeholder_rows_sql) + return f"VALUES {values_sql}" + + def combine_expression(self, connector, sub_expressions): + # SQLite doesn't have a ^ operator, so use the user-defined POWER + # function that's registered in connect(). + if connector == "^": + return "POWER(%s)" % ",".join(sub_expressions) + elif connector == "#": + return "BITXOR(%s)" % ",".join(sub_expressions) + return super().combine_expression(connector, sub_expressions) + + def combine_duration_expression(self, connector, sub_expressions): + if connector not in ["+", "-", "*", "/"]: + raise DatabaseError("Invalid connector for timedelta: %s." 
% connector) + fn_params = ["'%s'" % connector] + sub_expressions + if len(fn_params) > 3: + raise ValueError("Too many params for timedelta operations.") + return "django_format_dtdelta(%s)" % ", ".join(fn_params) + + def integer_field_range(self, internal_type): + # SQLite doesn't enforce any integer constraints, but sqlite3 supports + # integers up to 64 bits. + if internal_type in [ + "PositiveBigIntegerField", + "PositiveIntegerField", + "PositiveSmallIntegerField", + ]: + return (0, 9223372036854775807) + return (-9223372036854775808, 9223372036854775807) + + def subtract_temporals(self, internal_type, lhs, rhs): + lhs_sql, lhs_params = lhs + rhs_sql, rhs_params = rhs + params = (*lhs_params, *rhs_params) + if internal_type == "TimeField": + return "django_time_diff(%s, %s)" % (lhs_sql, rhs_sql), params + return "django_timestamp_diff(%s, %s)" % (lhs_sql, rhs_sql), params + + def insert_statement(self, on_conflict=None): + if on_conflict == OnConflict.IGNORE: + return "INSERT OR IGNORE INTO" + return super().insert_statement(on_conflict=on_conflict) + + def return_insert_columns(self, fields): + # SQLite < 3.35 doesn't support an INSERT...RETURNING statement. + if not fields: + return "", () + columns = [ + "%s.%s" + % ( + self.quote_name(field.model._meta.db_table), + self.quote_name(field.column), + ) + for field in fields + ] + return "RETURNING %s" % ", ".join(columns), () + + def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields): + if ( + on_conflict == OnConflict.UPDATE + and self.connection.features.supports_update_conflicts_with_target + ): + return "ON CONFLICT(%s) DO UPDATE SET %s" % ( + ", ".join(map(self.quote_name, unique_fields)), + ", ".join( + [ + f"{field} = EXCLUDED.{field}" + for field in map(self.quote_name, update_fields) + ] + ), + ) + return super().on_conflict_suffix_sql( + fields, + on_conflict, + update_fields, + unique_fields, + ) diff --git a/testbed/django__django/django/db/models/options.py b/testbed/django__django/django/db/models/options.py new file mode 100644 index 0000000000000000000000000000000000000000..64e5ff53eaa83f3089a9b9d315ed748f1903fe5e --- /dev/null +++ b/testbed/django__django/django/db/models/options.py @@ -0,0 +1,1007 @@ +import bisect +import copy +import inspect +import warnings +from collections import defaultdict + +from django.apps import apps +from django.conf import settings +from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured +from django.db import connections +from django.db.models import AutoField, Manager, OrderWrt, UniqueConstraint +from django.db.models.query_utils import PathInfo +from django.utils.datastructures import ImmutableList, OrderedSet +from django.utils.deprecation import RemovedInDjango51Warning +from django.utils.functional import cached_property +from django.utils.module_loading import import_string +from django.utils.text import camel_case_to_spaces, format_lazy +from django.utils.translation import override + +PROXY_PARENTS = object() + +EMPTY_RELATION_TREE = () + +IMMUTABLE_WARNING = ( + "The return type of '%s' should never be mutated. If you want to manipulate this " + "list for your own use, make a copy first." 
+) + +DEFAULT_NAMES = ( + "verbose_name", + "verbose_name_plural", + "db_table", + "db_table_comment", + "ordering", + "unique_together", + "permissions", + "get_latest_by", + "order_with_respect_to", + "app_label", + "db_tablespace", + "abstract", + "managed", + "proxy", + "swappable", + "auto_created", + "index_together", # RemovedInDjango51Warning. + "apps", + "default_permissions", + "select_on_save", + "default_related_name", + "required_db_features", + "required_db_vendor", + "base_manager_name", + "default_manager_name", + "indexes", + "constraints", +) + + +def normalize_together(option_together): + """ + option_together can be either a tuple of tuples, or a single + tuple of two strings. Normalize it to a tuple of tuples, so that + calling code can uniformly expect that. + """ + try: + if not option_together: + return () + if not isinstance(option_together, (tuple, list)): + raise TypeError + first_element = option_together[0] + if not isinstance(first_element, (tuple, list)): + option_together = (option_together,) + # Normalize everything to tuples + return tuple(tuple(ot) for ot in option_together) + except TypeError: + # If the value of option_together isn't valid, return it + # verbatim; this will be picked up by the check framework later. + return option_together + + +def make_immutable_fields_list(name, data): + return ImmutableList(data, warning=IMMUTABLE_WARNING % name) + + +class Options: + FORWARD_PROPERTIES = { + "fields", + "many_to_many", + "concrete_fields", + "local_concrete_fields", + "_non_pk_concrete_field_names", + "_forward_fields_map", + "managers", + "managers_map", + "base_manager", + "default_manager", + } + REVERSE_PROPERTIES = {"related_objects", "fields_map", "_relation_tree"} + + default_apps = apps + + def __init__(self, meta, app_label=None): + self._get_fields_cache = {} + self.local_fields = [] + self.local_many_to_many = [] + self.private_fields = [] + self.local_managers = [] + self.base_manager_name = None + self.default_manager_name = None + self.model_name = None + self.verbose_name = None + self.verbose_name_plural = None + self.db_table = "" + self.db_table_comment = "" + self.ordering = [] + self._ordering_clash = False + self.indexes = [] + self.constraints = [] + self.unique_together = [] + self.index_together = [] # RemovedInDjango51Warning. + self.select_on_save = False + self.default_permissions = ("add", "change", "delete", "view") + self.permissions = [] + self.object_name = None + self.app_label = app_label + self.get_latest_by = None + self.order_with_respect_to = None + self.db_tablespace = settings.DEFAULT_TABLESPACE + self.required_db_features = [] + self.required_db_vendor = None + self.meta = meta + self.pk = None + self.auto_field = None + self.abstract = False + self.managed = True + self.proxy = False + # For any class that is a proxy (including automatically created + # classes for deferred object loading), proxy_for_model tells us + # which class this model is proxying. Note that proxy_for_model + # can create a chain of proxy models. For non-proxy models, the + # variable is always None. + self.proxy_for_model = None + # For any non-abstract class, the concrete class is the model + # in the end of the proxy_for_model chain. In particular, for + # concrete models, the concrete_model is always the class itself. + self.concrete_model = None + self.swappable = None + self.parents = {} + self.auto_created = False + + # List of all lookups defined in ForeignKey 'limit_choices_to' options + # from *other* models. 
Needed for some admin checks. Internal use only.
+ self.related_fkey_lookups = []
+
+ # A custom app registry to use, if you're making a separate model set.
+ self.apps = self.default_apps
+
+ self.default_related_name = None
+
+ @property
+ def label(self):
+ return "%s.%s" % (self.app_label, self.object_name)
+
+ @property
+ def label_lower(self):
+ return "%s.%s" % (self.app_label, self.model_name)
+
+ @property
+ def app_config(self):
+ # Don't go through get_app_config to avoid triggering imports.
+ return self.apps.app_configs.get(self.app_label)
+
+ def contribute_to_class(self, cls, name):
+ from django.db import connection
+ from django.db.backends.utils import truncate_name
+
+ cls._meta = self
+ self.model = cls
+ # First, construct the default values for these options.
+ self.object_name = cls.__name__
+ self.model_name = self.object_name.lower()
+ self.verbose_name = camel_case_to_spaces(self.object_name)
+
+ # Store the original user-defined values for each option,
+ # for use when serializing the model definition
+ self.original_attrs = {}
+
+ # Next, apply any overridden values from 'class Meta'.
+ if self.meta:
+ meta_attrs = self.meta.__dict__.copy()
+ for name in self.meta.__dict__:
+ # Ignore any private attributes that Django doesn't care about.
+ # NOTE: We can't modify a dictionary's contents while looping
+ # over it, so we loop over the *original* dictionary instead.
+ if name.startswith("_"):
+ del meta_attrs[name]
+ for attr_name in DEFAULT_NAMES:
+ if attr_name in meta_attrs:
+ setattr(self, attr_name, meta_attrs.pop(attr_name))
+ self.original_attrs[attr_name] = getattr(self, attr_name)
+ elif hasattr(self.meta, attr_name):
+ setattr(self, attr_name, getattr(self.meta, attr_name))
+ self.original_attrs[attr_name] = getattr(self, attr_name)
+
+ self.unique_together = normalize_together(self.unique_together)
+ self.index_together = normalize_together(self.index_together)
+ if self.index_together:
+ warnings.warn(
+ f"'index_together' is deprecated. Use 'Meta.indexes' in "
+ f"{self.label!r} instead.",
+ RemovedInDjango51Warning,
+ )
+ # App label/class name interpolation for names of constraints and
+ # indexes.
+ if not getattr(cls._meta, "abstract", False):
+ for attr_name in {"constraints", "indexes"}:
+ objs = getattr(self, attr_name, [])
+ setattr(self, attr_name, self._format_names_with_class(cls, objs))
+
+ # verbose_name_plural is a special case because it uses a 's'
+ # by default.
+ if self.verbose_name_plural is None:
+ self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
+
+ # order_with_respect_to and ordering are mutually exclusive.
+ self._ordering_clash = bool(self.ordering and self.order_with_respect_to)
+
+ # Any leftover attributes must be invalid.
+ if meta_attrs != {}:
+ raise TypeError(
+ "'class Meta' got invalid attribute(s): %s" % ",".join(meta_attrs)
+ )
+ else:
+ self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
+ del self.meta
+
+ # If the db_table wasn't provided, use the app_label + model_name.
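+ # Illustrative example (not part of the original file): app_label
+ # "shop" and model "Product" yield db_table "shop_product", truncated
+ # below to the backend's maximum identifier length.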
+ if not self.db_table: + self.db_table = "%s_%s" % (self.app_label, self.model_name) + self.db_table = truncate_name( + self.db_table, connection.ops.max_name_length() + ) + + def _format_names_with_class(self, cls, objs): + """App label/class name interpolation for object names.""" + new_objs = [] + for obj in objs: + obj = obj.clone() + obj.name = obj.name % { + "app_label": cls._meta.app_label.lower(), + "class": cls.__name__.lower(), + } + new_objs.append(obj) + return new_objs + + def _get_default_pk_class(self): + pk_class_path = getattr( + self.app_config, + "default_auto_field", + settings.DEFAULT_AUTO_FIELD, + ) + if self.app_config and self.app_config._is_default_auto_field_overridden: + app_config_class = type(self.app_config) + source = ( + f"{app_config_class.__module__}." + f"{app_config_class.__qualname__}.default_auto_field" + ) + else: + source = "DEFAULT_AUTO_FIELD" + if not pk_class_path: + raise ImproperlyConfigured(f"{source} must not be empty.") + try: + pk_class = import_string(pk_class_path) + except ImportError as e: + msg = ( + f"{source} refers to the module '{pk_class_path}' that could " + f"not be imported." + ) + raise ImproperlyConfigured(msg) from e + if not issubclass(pk_class, AutoField): + raise ValueError( + f"Primary key '{pk_class_path}' referred by {source} must " + f"subclass AutoField." + ) + return pk_class + + def _prepare(self, model): + if self.order_with_respect_to: + # The app registry will not be ready at this point, so we cannot + # use get_field(). + query = self.order_with_respect_to + try: + self.order_with_respect_to = next( + f + for f in self._get_fields(reverse=False) + if f.name == query or f.attname == query + ) + except StopIteration: + raise FieldDoesNotExist( + "%s has no field named '%s'" % (self.object_name, query) + ) + + self.ordering = ("_order",) + if not any( + isinstance(field, OrderWrt) for field in model._meta.local_fields + ): + model.add_to_class("_order", OrderWrt()) + else: + self.order_with_respect_to = None + + if self.pk is None: + if self.parents: + # Promote the first parent link in lieu of adding yet another + # field. + field = next(iter(self.parents.values())) + # Look for a local field with the same name as the + # first parent link. If a local field has already been + # created, use it instead of promoting the parent + already_created = [ + fld for fld in self.local_fields if fld.name == field.name + ] + if already_created: + field = already_created[0] + field.primary_key = True + self.setup_pk(field) + else: + pk_class = self._get_default_pk_class() + auto = pk_class(verbose_name="ID", primary_key=True, auto_created=True) + model.add_to_class("id", auto) + + def add_manager(self, manager): + self.local_managers.append(manager) + self._expire_cache() + + def add_field(self, field, private=False): + # Insert the given field in the order in which it was created, using + # the "creation_counter" attribute of the field. + # Move many-to-many related fields from self.fields into + # self.many_to_many. + if private: + self.private_fields.append(field) + elif field.is_relation and field.many_to_many: + bisect.insort(self.local_many_to_many, field) + else: + bisect.insort(self.local_fields, field) + self.setup_pk(field) + + # If the field being added is a relation to another known field, + # expire the cache on this field and the forward cache on the field + # being referenced, because there will be new relationships in the + # cache. Otherwise, expire the cache of references *to* this field. 
+ # The mechanism for getting at the related model is slightly odd -
+ # ideally, we'd just ask for field.related_model. However, related_model
+ # is a cached property, and all the models haven't been loaded yet, so
+ # we need to make sure we don't cache a string reference.
+ if (
+ field.is_relation
+ and hasattr(field.remote_field, "model")
+ and field.remote_field.model
+ ):
+ try:
+ field.remote_field.model._meta._expire_cache(forward=False)
+ except AttributeError:
+ pass
+ self._expire_cache()
+ else:
+ self._expire_cache(reverse=False)
+
+ def setup_pk(self, field):
+ if not self.pk and field.primary_key:
+ self.pk = field
+ field.serialize = False
+
+ def setup_proxy(self, target):
+ """
+ Do the internal setup so that the current model is a proxy for
+ "target".
+ """
+ self.pk = target._meta.pk
+ self.proxy_for_model = target
+ self.db_table = target._meta.db_table
+
+ def __repr__(self):
+ return "<Options for %s>" % self.object_name
+
+ def __str__(self):
+ return self.label_lower
+
+ def can_migrate(self, connection):
+ """
+ Return True if the model can/should be migrated on the `connection`.
+ `connection` can be either a real connection or a connection alias.
+ """
+ if self.proxy or self.swapped or not self.managed:
+ return False
+ if isinstance(connection, str):
+ connection = connections[connection]
+ if self.required_db_vendor:
+ return self.required_db_vendor == connection.vendor
+ if self.required_db_features:
+ return all(
+ getattr(connection.features, feat, False)
+ for feat in self.required_db_features
+ )
+ return True
+
+ @property
+ def verbose_name_raw(self):
+ """Return the untranslated verbose name."""
+ with override(None):
+ return str(self.verbose_name)
+
+ @property
+ def swapped(self):
+ """
+ Has this model been swapped out for another? If so, return the model
+ name of the replacement; otherwise, return None.
+
+ For historical reasons, model name lookups using get_model() are
+ case insensitive, so we make sure we are case insensitive here.
+ """
+ if self.swappable:
+ swapped_for = getattr(settings, self.swappable, None)
+ if swapped_for:
+ try:
+ swapped_label, swapped_object = swapped_for.split(".")
+ except ValueError:
+ # setting not in the format app_label.model_name
+ # raising ImproperlyConfigured here causes problems with
+ # test cleanup code - instead it is raised in get_user_model
+ # or as part of validation.
+ return swapped_for
+
+ if (
+ "%s.%s" % (swapped_label, swapped_object.lower())
+ != self.label_lower
+ ):
+ return swapped_for
+ return None
+
+ @cached_property
+ def managers(self):
+ managers = []
+ seen_managers = set()
+ bases = (b for b in self.model.mro() if hasattr(b, "_meta"))
+ for depth, base in enumerate(bases):
+ for manager in base._meta.local_managers:
+ if manager.name in seen_managers:
+ continue
+
+ manager = copy.copy(manager)
+ manager.model = self.model
+ seen_managers.add(manager.name)
+ managers.append((depth, manager.creation_counter, manager))
+
+ return make_immutable_fields_list(
+ "managers",
+ (m[2] for m in sorted(managers)),
+ )
+
+ @cached_property
+ def managers_map(self):
+ return {manager.name: manager for manager in self.managers}
+
+ @cached_property
+ def base_manager(self):
+ base_manager_name = self.base_manager_name
+ if not base_manager_name:
+ # Get the first parent's base_manager_name if there's one.
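+ # Illustrative example (not part of the original file): if a parent
+ # model set Meta.base_manager_name = "all_objects", the loop below
+ # makes the child inherit that manager name.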
+ for parent in self.model.mro()[1:]: + if hasattr(parent, "_meta"): + if parent._base_manager.name != "_base_manager": + base_manager_name = parent._base_manager.name + break + + if base_manager_name: + try: + return self.managers_map[base_manager_name] + except KeyError: + raise ValueError( + "%s has no manager named %r" + % ( + self.object_name, + base_manager_name, + ) + ) + + manager = Manager() + manager.name = "_base_manager" + manager.model = self.model + manager.auto_created = True + return manager + + @cached_property + def default_manager(self): + default_manager_name = self.default_manager_name + if not default_manager_name and not self.local_managers: + # Get the first parent's default_manager_name if there's one. + for parent in self.model.mro()[1:]: + if hasattr(parent, "_meta"): + default_manager_name = parent._meta.default_manager_name + break + + if default_manager_name: + try: + return self.managers_map[default_manager_name] + except KeyError: + raise ValueError( + "%s has no manager named %r" + % ( + self.object_name, + default_manager_name, + ) + ) + + if self.managers: + return self.managers[0] + + @cached_property + def fields(self): + """ + Return a list of all forward fields on the model and its parents, + excluding ManyToManyFields. + + Private API intended only to be used by Django itself; get_fields() + combined with filtering of field properties is the public API for + obtaining this field list. + """ + + # For legacy reasons, the fields property should only contain forward + # fields that are not private or with a m2m cardinality. Therefore we + # pass these three filters as filters to the generator. + # The third lambda is a longwinded way of checking f.related_model - we don't + # use that property directly because related_model is a cached property, + # and all the models may not have been loaded yet; we don't want to cache + # the string reference to the related_model. + def is_not_an_m2m_field(f): + return not (f.is_relation and f.many_to_many) + + def is_not_a_generic_relation(f): + return not (f.is_relation and f.one_to_many) + + def is_not_a_generic_foreign_key(f): + return not ( + f.is_relation + and f.many_to_one + and not (hasattr(f.remote_field, "model") and f.remote_field.model) + ) + + return make_immutable_fields_list( + "fields", + ( + f + for f in self._get_fields(reverse=False) + if is_not_an_m2m_field(f) + and is_not_a_generic_relation(f) + and is_not_a_generic_foreign_key(f) + ), + ) + + @cached_property + def concrete_fields(self): + """ + Return a list of all concrete fields on the model and its parents. + + Private API intended only to be used by Django itself; get_fields() + combined with filtering of field properties is the public API for + obtaining this field list. + """ + return make_immutable_fields_list( + "concrete_fields", (f for f in self.fields if f.concrete) + ) + + @cached_property + def local_concrete_fields(self): + """ + Return a list of all concrete fields on the model. + + Private API intended only to be used by Django itself; get_fields() + combined with filtering of field properties is the public API for + obtaining this field list. + """ + return make_immutable_fields_list( + "local_concrete_fields", (f for f in self.local_fields if f.concrete) + ) + + @cached_property + def many_to_many(self): + """ + Return a list of all many to many fields on the model and its parents. 
+ + Private API intended only to be used by Django itself; get_fields() + combined with filtering of field properties is the public API for + obtaining this list. + """ + return make_immutable_fields_list( + "many_to_many", + ( + f + for f in self._get_fields(reverse=False) + if f.is_relation and f.many_to_many + ), + ) + + @cached_property + def related_objects(self): + """ + Return all related objects pointing to the current model. The related + objects can come from a one-to-one, one-to-many, or many-to-many field + relation type. + + Private API intended only to be used by Django itself; get_fields() + combined with filtering of field properties is the public API for + obtaining this field list. + """ + all_related_fields = self._get_fields( + forward=False, reverse=True, include_hidden=True + ) + return make_immutable_fields_list( + "related_objects", + ( + obj + for obj in all_related_fields + if not obj.hidden or obj.field.many_to_many + ), + ) + + @cached_property + def _forward_fields_map(self): + res = {} + fields = self._get_fields(reverse=False) + for field in fields: + res[field.name] = field + # Due to the way Django's internals work, get_field() should also + # be able to fetch a field by attname. In the case of a concrete + # field with relation, includes the *_id name too + try: + res[field.attname] = field + except AttributeError: + pass + return res + + @cached_property + def fields_map(self): + res = {} + fields = self._get_fields(forward=False, include_hidden=True) + for field in fields: + res[field.name] = field + # Due to the way Django's internals work, get_field() should also + # be able to fetch a field by attname. In the case of a concrete + # field with relation, includes the *_id name too + try: + res[field.attname] = field + except AttributeError: + pass + return res + + def get_field(self, field_name): + """ + Return a field instance given the name of a forward or reverse field. + """ + try: + # In order to avoid premature loading of the relation tree + # (expensive) we prefer checking if the field is a forward field. + return self._forward_fields_map[field_name] + except KeyError: + # If the app registry is not ready, reverse fields are + # unavailable, therefore we throw a FieldDoesNotExist exception. + if not self.apps.models_ready: + raise FieldDoesNotExist( + "%s has no field named '%s'. The app cache isn't ready yet, " + "so if this is an auto-created related field, it won't " + "be available yet." % (self.object_name, field_name) + ) + + try: + # Retrieve field instance by name from cached or just-computed + # field map. + return self.fields_map[field_name] + except KeyError: + raise FieldDoesNotExist( + "%s has no field named '%s'" % (self.object_name, field_name) + ) + + def get_base_chain(self, model): + """ + Return a list of parent classes leading to `model` (ordered from + closest to most distant ancestor). This has to handle the case where + `model` is a grandparent or even more distant relation. + """ + if not self.parents: + return [] + if model in self.parents: + return [model] + for parent in self.parents: + res = parent._meta.get_base_chain(model) + if res: + res.insert(0, parent) + return res + return [] + + def get_parent_list(self): + """ + Return all the ancestors of this model as a list ordered by MRO. + Useful for determining if something is an ancestor, regardless of lineage. 
+ """ + result = OrderedSet(self.parents) + for parent in self.parents: + for ancestor in parent._meta.get_parent_list(): + result.add(ancestor) + return list(result) + + def get_ancestor_link(self, ancestor): + """ + Return the field on the current model which points to the given + "ancestor". This is possible an indirect link (a pointer to a parent + model, which points, eventually, to the ancestor). Used when + constructing table joins for model inheritance. + + Return None if the model isn't an ancestor of this one. + """ + if ancestor in self.parents: + return self.parents[ancestor] + for parent in self.parents: + # Tries to get a link field from the immediate parent + parent_link = parent._meta.get_ancestor_link(ancestor) + if parent_link: + # In case of a proxied model, the first link + # of the chain to the ancestor is that parent + # links + return self.parents[parent] or parent_link + + def get_path_to_parent(self, parent): + """ + Return a list of PathInfos containing the path from the current + model to the parent model, or an empty list if parent is not a + parent of the current model. + """ + if self.model is parent: + return [] + # Skip the chain of proxy to the concrete proxied model. + proxied_model = self.concrete_model + path = [] + opts = self + for int_model in self.get_base_chain(parent): + if int_model is proxied_model: + opts = int_model._meta + else: + final_field = opts.parents[int_model] + targets = (final_field.remote_field.get_related_field(),) + opts = int_model._meta + path.append( + PathInfo( + from_opts=final_field.model._meta, + to_opts=opts, + target_fields=targets, + join_field=final_field, + m2m=False, + direct=True, + filtered_relation=None, + ) + ) + return path + + def get_path_from_parent(self, parent): + """ + Return a list of PathInfos containing the path from the parent + model to the current model, or an empty list if parent is not a + parent of the current model. + """ + if self.model is parent: + return [] + model = self.concrete_model + # Get a reversed base chain including both the current and parent + # models. + chain = model._meta.get_base_chain(parent) + chain.reverse() + chain.append(model) + # Construct a list of the PathInfos between models in chain. + path = [] + for i, ancestor in enumerate(chain[:-1]): + child = chain[i + 1] + link = child._meta.get_ancestor_link(ancestor) + path.extend(link.reverse_path_infos) + return path + + def _populate_directed_relation_graph(self): + """ + This method is used by each model to find its reverse objects. As this + method is very expensive and is accessed frequently (it looks up every + field in a model, in every app), it is computed on first access and then + is set as a property on every model. + """ + related_objects_graph = defaultdict(list) + + all_models = self.apps.get_models(include_auto_created=True) + for model in all_models: + opts = model._meta + # Abstract model's fields are copied to child models, hence we will + # see the fields from the child models. + if opts.abstract: + continue + fields_with_relations = ( + f + for f in opts._get_fields(reverse=False, include_parents=False) + if f.is_relation and f.related_model is not None + ) + for f in fields_with_relations: + if not isinstance(f.remote_field.model, str): + remote_label = f.remote_field.model._meta.concrete_model._meta.label + related_objects_graph[remote_label].append(f) + + for model in all_models: + # Set the relation_tree using the internal __dict__. In this way + # we avoid calling the cached property. 
In attribute lookup, + # __dict__ takes precedence over a data descriptor (such as + # @cached_property). This means that the _meta._relation_tree is + # only called if related_objects is not in __dict__. + related_objects = related_objects_graph[ + model._meta.concrete_model._meta.label + ] + model._meta.__dict__["_relation_tree"] = related_objects + # It seems it is possible that self is not in all_models, so guard + # against that with default for get(). + return self.__dict__.get("_relation_tree", EMPTY_RELATION_TREE) + + @cached_property + def _relation_tree(self): + return self._populate_directed_relation_graph() + + def _expire_cache(self, forward=True, reverse=True): + # This method is usually called by apps.cache_clear(), when the + # registry is finalized, or when a new field is added. + if forward: + for cache_key in self.FORWARD_PROPERTIES: + if cache_key in self.__dict__: + delattr(self, cache_key) + if reverse and not self.abstract: + for cache_key in self.REVERSE_PROPERTIES: + if cache_key in self.__dict__: + delattr(self, cache_key) + self._get_fields_cache = {} + + def get_fields(self, include_parents=True, include_hidden=False): + """ + Return a list of fields associated to the model. By default, include + forward and reverse fields, fields derived from inheritance, but not + hidden fields. The returned fields can be changed using the parameters: + + - include_parents: include fields derived from inheritance + - include_hidden: include fields that have a related_name that + starts with a "+" + """ + if include_parents is False: + include_parents = PROXY_PARENTS + return self._get_fields( + include_parents=include_parents, include_hidden=include_hidden + ) + + def _get_fields( + self, + forward=True, + reverse=True, + include_parents=True, + include_hidden=False, + topmost_call=True, + ): + """ + Internal helper function to return fields of the model. + * If forward=True, then fields defined on this model are returned. + * If reverse=True, then relations pointing to this model are returned. + * If include_hidden=True, then fields with is_hidden=True are returned. + * The include_parents argument toggles if fields from parent models + should be included. It has three values: True, False, and + PROXY_PARENTS. When set to PROXY_PARENTS, the call will return all + fields defined for the current model or any of its parents in the + parent chain to the model's concrete model. + """ + if include_parents not in (True, False, PROXY_PARENTS): + raise TypeError( + "Invalid argument for include_parents: %s" % (include_parents,) + ) + # This helper function is used to allow recursion in ``get_fields()`` + # implementation and to provide a fast way for Django's internals to + # access specific subsets of fields. + + # Creates a cache key composed of all arguments + cache_key = (forward, reverse, include_parents, include_hidden, topmost_call) + + try: + # In order to avoid list manipulation. Always return a shallow copy + # of the results. + return self._get_fields_cache[cache_key] + except KeyError: + pass + + fields = [] + # Recursively call _get_fields() on each parent, with the same + # options provided in this call. + if include_parents is not False: + # In diamond inheritance it is possible that we see the same model + # from two different routes. In that case, avoid adding fields from + # the same parent again. 
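+ # Illustrative example (not part of the original file): with parents
+ # A(P) and B(P) and child C(A, B), P's fields are reachable through
+ # both A and B; the parent_fields set below keeps one copy.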
+ parent_fields = set() + for parent in self.parents: + if ( + parent._meta.concrete_model != self.concrete_model + and include_parents == PROXY_PARENTS + ): + continue + for obj in parent._meta._get_fields( + forward=forward, + reverse=reverse, + include_parents=include_parents, + include_hidden=include_hidden, + topmost_call=False, + ): + if ( + not getattr(obj, "parent_link", False) + or obj.model == self.concrete_model + ) and obj not in parent_fields: + fields.append(obj) + parent_fields.add(obj) + + if reverse and not self.proxy: + # Tree is computed once and cached until the app cache is expired. + # It is composed of a list of fields pointing to the current model + # from other models. + all_fields = self._relation_tree + for field in all_fields: + # If hidden fields should be included or the relation is not + # intentionally hidden, add to the fields dict. + if include_hidden or not field.remote_field.hidden: + fields.append(field.remote_field) + + if forward: + fields += self.local_fields + fields += self.local_many_to_many + # Private fields are recopied to each child model, and they get a + # different model as field.model in each child. Hence we have to + # add the private fields separately from the topmost call. If we + # did this recursively similar to local_fields, we would get field + # instances with field.model != self.model. + if topmost_call: + fields += self.private_fields + + # In order to avoid list manipulation. Always + # return a shallow copy of the results + fields = make_immutable_fields_list("get_fields()", fields) + + # Store result into cache for later access + self._get_fields_cache[cache_key] = fields + return fields + + @cached_property + def total_unique_constraints(self): + """ + Return a list of total unique constraints. Useful for determining set + of fields guaranteed to be unique for all rows. + """ + return [ + constraint + for constraint in self.constraints + if ( + isinstance(constraint, UniqueConstraint) + and constraint.condition is None + and not constraint.contains_expressions + ) + ] + + @cached_property + def _property_names(self): + """Return a set of the names of the properties defined on the model.""" + names = [] + for name in dir(self.model): + attr = inspect.getattr_static(self.model, name) + if isinstance(attr, property): + names.append(name) + return frozenset(names) + + @cached_property + def _non_pk_concrete_field_names(self): + """ + Return a set of the non-pk concrete field names defined on the model. + """ + names = [] + for field in self.concrete_fields: + if not field.primary_key: + names.append(field.name) + if field.name != field.attname: + names.append(field.attname) + return frozenset(names) + + @cached_property + def db_returning_fields(self): + """ + Private API intended only to be used by Django itself. + Fields to be returned after a database insert. 
+ """ + return [ + field + for field in self._get_fields( + forward=True, reverse=False, include_parents=PROXY_PARENTS + ) + if getattr(field, "db_returning", False) + ] diff --git a/testbed/django__django/django/db/utils.py b/testbed/django__django/django/db/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e45f1db249ca19fd7e7a247a348d8691553e537d --- /dev/null +++ b/testbed/django__django/django/db/utils.py @@ -0,0 +1,278 @@ +import pkgutil +from importlib import import_module + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured + +# For backwards compatibility with Django < 3.2 +from django.utils.connection import ConnectionDoesNotExist # NOQA: F401 +from django.utils.connection import BaseConnectionHandler +from django.utils.functional import cached_property +from django.utils.module_loading import import_string + +DEFAULT_DB_ALIAS = "default" +DJANGO_VERSION_PICKLE_KEY = "_django_version" + + +class Error(Exception): + pass + + +class InterfaceError(Error): + pass + + +class DatabaseError(Error): + pass + + +class DataError(DatabaseError): + pass + + +class OperationalError(DatabaseError): + pass + + +class IntegrityError(DatabaseError): + pass + + +class InternalError(DatabaseError): + pass + + +class ProgrammingError(DatabaseError): + pass + + +class NotSupportedError(DatabaseError): + pass + + +class DatabaseErrorWrapper: + """ + Context manager and decorator that reraises backend-specific database + exceptions using Django's common wrappers. + """ + + def __init__(self, wrapper): + """ + wrapper is a database wrapper. + + It must have a Database attribute defining PEP-249 exceptions. + """ + self.wrapper = wrapper + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + return + for dj_exc_type in ( + DataError, + OperationalError, + IntegrityError, + InternalError, + ProgrammingError, + NotSupportedError, + DatabaseError, + InterfaceError, + Error, + ): + db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__) + if issubclass(exc_type, db_exc_type): + dj_exc_value = dj_exc_type(*exc_value.args) + # Only set the 'errors_occurred' flag for errors that may make + # the connection unusable. + if dj_exc_type not in (DataError, IntegrityError): + self.wrapper.errors_occurred = True + raise dj_exc_value.with_traceback(traceback) from exc_value + + def __call__(self, func): + # Note that we are intentionally not using @wraps here for performance + # reasons. Refs #21109. + def inner(*args, **kwargs): + with self: + return func(*args, **kwargs) + + return inner + + +def load_backend(backend_name): + """ + Return a database backend's "base" module given a fully qualified database + backend name, or raise an error if it doesn't exist. + """ + # This backend was renamed in Django 1.9. + if backend_name == "django.db.backends.postgresql_psycopg2": + backend_name = "django.db.backends.postgresql" + + try: + return import_module("%s.base" % backend_name) + except ImportError as e_user: + # The database backend wasn't found. Display a helpful error message + # listing all built-in database backends. 
+ import django.db.backends + + builtin_backends = [ + name + for _, name, ispkg in pkgutil.iter_modules(django.db.backends.__path__) + if ispkg and name not in {"base", "dummy"} + ] + if backend_name not in ["django.db.backends.%s" % b for b in builtin_backends]: + backend_reprs = map(repr, sorted(builtin_backends)) + raise ImproperlyConfigured( + "%r isn't an available database backend or couldn't be " + "imported. Check the above exception. To use one of the " + "built-in backends, use 'django.db.backends.XXX', where XXX " + "is one of:\n" + " %s" % (backend_name, ", ".join(backend_reprs)) + ) from e_user + else: + # If there's some other error, this must be an error in Django + raise + + +class ConnectionHandler(BaseConnectionHandler): + settings_name = "DATABASES" + # Connections needs to still be an actual thread local, as it's truly + # thread-critical. Database backends should use @async_unsafe to protect + # their code from async contexts, but this will give those contexts + # separate connections in case it's needed as well. There's no cleanup + # after async contexts, though, so we don't allow that if we can help it. + thread_critical = True + + def configure_settings(self, databases): + databases = super().configure_settings(databases) + if databases == {}: + databases[DEFAULT_DB_ALIAS] = {"ENGINE": "django.db.backends.dummy"} + elif DEFAULT_DB_ALIAS not in databases: + raise ImproperlyConfigured( + f"You must define a '{DEFAULT_DB_ALIAS}' database." + ) + elif databases[DEFAULT_DB_ALIAS] == {}: + databases[DEFAULT_DB_ALIAS]["ENGINE"] = "django.db.backends.dummy" + + # Configure default settings. + for conn in databases.values(): + conn.setdefault("ATOMIC_REQUESTS", False) + conn.setdefault("AUTOCOMMIT", True) + conn.setdefault("ENGINE", "django.db.backends.dummy") + if conn["ENGINE"] == "django.db.backends." or not conn["ENGINE"]: + conn["ENGINE"] = "django.db.backends.dummy" + conn.setdefault("CONN_MAX_AGE", 0) + conn.setdefault("CONN_HEALTH_CHECKS", False) + conn.setdefault("OPTIONS", {}) + conn.setdefault("TIME_ZONE", None) + for setting in ["NAME", "USER", "PASSWORD", "HOST", "PORT"]: + conn.setdefault(setting, "") + + test_settings = conn.setdefault("TEST", {}) + default_test_settings = [ + ("CHARSET", None), + ("COLLATION", None), + ("MIGRATE", True), + ("MIRROR", None), + ("NAME", None), + ] + for key, value in default_test_settings: + test_settings.setdefault(key, value) + return databases + + @property + def databases(self): + # Maintained for backward compatibility as some 3rd party packages have + # made use of this private API in the past. It is no longer used within + # Django itself. + return self.settings + + def create_connection(self, alias): + db = self.settings[alias] + backend = load_backend(db["ENGINE"]) + return backend.DatabaseWrapper(db, alias) + + +class ConnectionRouter: + def __init__(self, routers=None): + """ + If routers is not specified, default to settings.DATABASE_ROUTERS. + """ + self._routers = routers + + @cached_property + def routers(self): + if self._routers is None: + self._routers = settings.DATABASE_ROUTERS + routers = [] + for r in self._routers: + if isinstance(r, str): + router = import_string(r)() + else: + router = r + routers.append(router) + return routers + + def _router_func(action): + def _route_db(self, model, **hints): + chosen_db = None + for router in self.routers: + try: + method = getattr(router, action) + except AttributeError: + # If the router doesn't have a method, skip to the next one. 
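+ # (The except arm leaves this router's vote unset, so the loop
+ # simply moves on to the next router in settings.DATABASE_ROUTERS.)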
+ pass + else: + chosen_db = method(model, **hints) + if chosen_db: + return chosen_db + instance = hints.get("instance") + if instance is not None and instance._state.db: + return instance._state.db + return DEFAULT_DB_ALIAS + + return _route_db + + db_for_read = _router_func("db_for_read") + db_for_write = _router_func("db_for_write") + + def allow_relation(self, obj1, obj2, **hints): + for router in self.routers: + try: + method = router.allow_relation + except AttributeError: + # If the router doesn't have a method, skip to the next one. + pass + else: + allow = method(obj1, obj2, **hints) + if allow is not None: + return allow + return obj1._state.db == obj2._state.db + + def allow_migrate(self, db, app_label, **hints): + for router in self.routers: + try: + method = router.allow_migrate + except AttributeError: + # If the router doesn't have a method, skip to the next one. + continue + + allow = method(db, app_label, **hints) + + if allow is not None: + return allow + return True + + def allow_migrate_model(self, db, model): + return self.allow_migrate( + db, + model._meta.app_label, + model_name=model._meta.model_name, + model=model, + ) + + def get_migratable_models(self, app_config, db, include_auto_created=False): + """Return app models allowed to be migrated on provided db.""" + models = app_config.get_models(include_auto_created=include_auto_created) + return [model for model in models if self.allow_migrate_model(db, model)] diff --git a/testbed/django__django/django/forms/jinja2/django/forms/errors/dict/default.html b/testbed/django__django/django/forms/jinja2/django/forms/errors/dict/default.html new file mode 100644 index 0000000000000000000000000000000000000000..19e4fba33e9d4d4de3f7e4342f907de83dbbb53f --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/errors/dict/default.html @@ -0,0 +1 @@ +{% include "django/forms/errors/dict/ul.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/errors/list/text.txt b/testbed/django__django/django/forms/jinja2/django/forms/errors/list/text.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa7f870b474e63f59b6df640fa9b1005f73c16c5 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/errors/list/text.txt @@ -0,0 +1,2 @@ +{% for error in errors %}* {{ error }} +{% endfor %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/errors/list/ul.html b/testbed/django__django/django/forms/jinja2/django/forms/errors/list/ul.html new file mode 100644 index 0000000000000000000000000000000000000000..752f7c2c8b5aa77c3c2e1bd639565caac932f933 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/errors/list/ul.html @@ -0,0 +1 @@ +{% if errors %}
<ul class="{{ error_class }}">{% for error in errors %}<li>{{ error }}</li>{% endfor %}</ul>
{% endif %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/formsets/div.html b/testbed/django__django/django/forms/jinja2/django/forms/formsets/div.html new file mode 100644 index 0000000000000000000000000000000000000000..0dda779d3f7324a991ab83c60a3d9c6e57fb70cd --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/formsets/div.html @@ -0,0 +1 @@ +{{ formset.management_form }}{% for form in formset %}{{ form.as_div() }}{% endfor %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_option.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_option.html new file mode 100644 index 0000000000000000000000000000000000000000..bb9acbafd9730d939ca3fddc60d551662d4f635c --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_option.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input_option.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_select.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_select.html new file mode 100644 index 0000000000000000000000000000000000000000..780899af446da6c3de2150e81aeede139afc8ed9 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/checkbox_select.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/multiple_input.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/date.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/date.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/date.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/file.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/file.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/file.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/input.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/input.html new file mode 100644 index 0000000000000000000000000000000000000000..d5651571f229e2d755324dd62d889722866abfba --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/input.html @@ -0,0 +1 @@ +<input type="{{ widget.type }}" name="{{ widget.name }}"{% if widget.value != None %} value="{{ widget.value|stringformat('s') }}"{% endif %}{% include "django/forms/widgets/attrs.html" %}> diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/input_option.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/input_option.html new file mode 100644 index 0000000000000000000000000000000000000000..48cd65b93af413f3540535a0a7fe425bd184dc94 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/input_option.html @@ -0,0 +1 @@ +{% if widget.wrap_label %}<label{% if widget.attrs.id %} for="{{ widget.attrs.id }}"{% endif %}>{% endif
%}{% include "django/forms/widgets/input.html" %}{% if widget.wrap_label %} {{ widget.label }}</label>{% endif %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/multiple_hidden.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/multiple_hidden.html new file mode 100644 index 0000000000000000000000000000000000000000..b9695deb0220ddea6a194282c3824259a94605f8 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/multiple_hidden.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/multiwidget.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/select.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/select.html new file mode 100644 index 0000000000000000000000000000000000000000..ea3bc84113da4dcc7052695591c3511efea18208 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/select.html @@ -0,0 +1,5 @@ +<select name="{{ widget.name }}"{% include "django/forms/widgets/attrs.html" %}>{% for group_name, group_choices, group_index in widget.optgroups %}{% if group_name %} +<optgroup label="{{ group_name }}">{% endif %}{% for option in group_choices %} +{% include option.template_name %}{% endfor %}{% if group_name %} +</optgroup>{% endif %}{% endfor %} +</select> diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/select_date.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/select_date.html new file mode 100644 index 0000000000000000000000000000000000000000..32fda82609f8b44d7199901b51d707899b93cdd8 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/select_date.html @@ -0,0 +1 @@ +{% include 'django/forms/widgets/multiwidget.html' %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/splithiddendatetime.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/splithiddendatetime.html new file mode 100644 index 0000000000000000000000000000000000000000..32fda82609f8b44d7199901b51d707899b93cdd8 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/splithiddendatetime.html @@ -0,0 +1 @@ +{% include 'django/forms/widgets/multiwidget.html' %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/text.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/text.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/text.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/time.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/time.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/time.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye.png b/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye.png new file mode 100644 index 0000000000000000000000000000000000000000..80ed9dc19053e14a73b134204751c398aae7ed0a --- /dev/null +++ b/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3c8e2826a9f80720030e79a77708c25ee6540eaea580eaad9f018f4cc14e5e0 +size 635 diff --git a/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye_large.png b/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye_large.png new file mode 100644 index
0000000000000000000000000000000000000000..67581e4f57e9111c8c5748785e4c57f056a28426 --- /dev/null +++ b/testbed/matplotlib__matplotlib/galleries/examples/user_interfaces/images/eye_large.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a105db064766918ef3977bd9c56d9c2f6eb9889eea009d91b23a566339754cd9 +size 1149 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stix_64.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stix_64.png new file mode 100644 index 0000000000000000000000000000000000000000..cee6ae1f32b81792dbc8a9da62b0a07cf36b6476 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stix_64.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39b3bdf86b0a3da1e0ca562b3fd86de98ca1afad75207f5edd495df7c6c105b6 +size 7005 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_61.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_61.png new file mode 100644 index 0000000000000000000000000000000000000000..d362571710eb7e7de2240b48582a4801515ab6e3 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_61.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df530da504cafbffc8fff75f695753328539faa2a925f1cbad6ce4a563bb6ea2 +size 8433 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_64.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_64.png new file mode 100644 index 0000000000000000000000000000000000000000..b0acbdcde75ad07d7ebb6ab6f07150130fa55941 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathfont_stixsans_64.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b053cc5a0802c37f636083f004f2e6fc6784061ec6d4d8f213c6aaa5ec23eb23 +size 6997 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_06.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_06.png new file mode 100644 index 0000000000000000000000000000000000000000..4ec133947b0ef4df8aaf813eda02d68b9e56afe9 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_06.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6111bf09acc13ebb168601f3be889ff5987aa518b845d89d61dba2d61c3d731c +size 4782 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_07.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_07.png new file mode 100644 index 0000000000000000000000000000000000000000..170a6912d82bac542fe0bb19c813d9c294764ade --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_mathtext/mathtext1_dejavusans_07.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b4531ff77a909348e02b974398c0a1e56151b20264c0bade8d5f754cdf3209e4 +size 5696 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_alignment.png 
b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_alignment.png new file mode 100644 index 0000000000000000000000000000000000000000..a91d4d822642bb06325d70ef9ba028acd5ad15ab --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_alignment.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:874af1dc788e6167ed9fa692cb764acf43fb6e7ea949c42c8113e538987df4bd +size 55353 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_axes.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_axes.png new file mode 100644 index 0000000000000000000000000000000000000000..ece00a59bf3ca7bc8cc9f9bb35565f8da2ca476c --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_axes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a668ea8ac508006a164f48bc00c54aa8b7179b359072abc0070f88593b4bd53a +size 71122 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_coords.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_coords.png new file mode 100644 index 0000000000000000000000000000000000000000..8489db3663aaa1fda2ec7e0a200c04bf58807fef --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_coords.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ec9900b6e837eb635c7b32332b1d02a1867f201b43b69335d3db3a13bfbd76b +size 9323 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim.png new file mode 100644 index 0000000000000000000000000000000000000000..d78cd8282b8d1a316712cce4a9b6bc75c856b008 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb81328854ca08ee87c05734f8938de35dcc83563c3fe3d680a1825604757efb +size 63099 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim_rorigin.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim_rorigin.png new file mode 100644 index 0000000000000000000000000000000000000000..56d21518603426fde77764d51cdee82b5081e242 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_invertedylim_rorigin.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec4d628873b617bd8688b78f9ed4be565f637f556d443daf6d429ed454bb657c +size 73761 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_log.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_log.png new file mode 100644 index 0000000000000000000000000000000000000000..72f09de1f1fbfe25cc1a4a0a79ba357e29b69d3f --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_log.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47b573da3fae128cd8512a53b01fc5811900aff5f6db21a054960bd2aa46a434 +size 41668 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_negative_rmin.png 
b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_negative_rmin.png new file mode 100644 index 0000000000000000000000000000000000000000..37117e6598ca0dde96c27056fa99fb990df5bc91 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_negative_rmin.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac5a9e678ca9002c427aa0dba1514434c3aca964024aad3b091888055411a39d +size 73626 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rlabel_position.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rlabel_position.png new file mode 100644 index 0000000000000000000000000000000000000000..ae51fe16e537da17be6a8d2b4793d9709d2f37c4 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rlabel_position.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77b7f13f69a71be55087b697c44abdb6f702b1aabc30be053049c2c3a46cedc9 +size 48771 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rmin.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rmin.png new file mode 100644 index 0000000000000000000000000000000000000000..469e7779f28acdcf5f24fa089d10175d016b523b --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rmin.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3bee0ec86bc2d2f8d86946e61c6f3fb75ec2d00523944a51abf5393140df19c1 +size 70018 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rorigin.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rorigin.png new file mode 100644 index 0000000000000000000000000000000000000000..62c4786f50558cc913bfda323e1d24fae49daf91 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_rorigin.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b8297cdf231d797f412da770802428c173ade9fa99648f3d2ffd8fe47fadea2 +size 81225 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_position.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_position.png new file mode 100644 index 0000000000000000000000000000000000000000..f0904502d18f2d1724d934487b1350ece5a3c04c --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_position.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aab1f97f2cb16b606633cdac92de719c3bda3000a8dd659cbd29329f6e7b2526 +size 90981 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_wedge.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_wedge.png new file mode 100644 index 0000000000000000000000000000000000000000..f93be9cd755015f152b04b7c2cd664267feb56a1 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_polar/polar_theta_wedge.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daef9e99131ee93d89c0017d5ad042be116d559a78ae04b0aba0158300f085c6 +size 167601 diff --git 
a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_tightlayout/tight_layout_offsetboxes2.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_tightlayout/tight_layout_offsetboxes2.png new file mode 100644 index 0000000000000000000000000000000000000000..e2f9575aa85c2f11a7ade2cd3e80ff46fd698774 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_tightlayout/tight_layout_offsetboxes2.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3633cf36b4e6ebca2a3e95f05ab3dff5574930297e41dc93d3603ad8739f9ec7 +size 36237 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_transforms/pre_transform_data.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_transforms/pre_transform_data.png new file mode 100644 index 0000000000000000000000000000000000000000..94c1a45ee4dc6945254ef64ab79d7b3b42854591 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_transforms/pre_transform_data.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc3ebaf629860008f11291bf10b8dbb3cc981fe72a102db0c9282a2f0b307a49 +size 72601 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_contouring.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_contouring.png new file mode 100644 index 0000000000000000000000000000000000000000..80fee6415824025d7d68db149e2e9f27f9668bfd --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_contouring.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14e8c21ce4b2c983552e6b3d888e49c0fcf7cccabafb5ab8de34217ac8800f11 +size 384808 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_gradient.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_gradient.png new file mode 100644 index 0000000000000000000000000000000000000000..ce2b5bc85e56453c017832707a65c0519b8c306e --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tri_smooth_gradient.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:841b29317a12d678e3002ccb80902601402620c83cae5f8058ade8a449e3136b +size 289977 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tripcolor1.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tripcolor1.png new file mode 100644 index 0000000000000000000000000000000000000000..87f338fa809b8dd07c235523184b7c736b7b503b --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_triangulation/tripcolor1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7049d431e7f2557965e66e811975ccaf3f5d99f0815f96e4dddbc761b450855 +size 43755 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_bar_units.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_bar_units.png new file mode 100644 index 0000000000000000000000000000000000000000..563c6cb0aa8fb38aa738a35f221894afa6b6c555 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_bar_units.png @@ -0,0 +1,3 
@@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8e51509bdd8962ba160f2dda4efb64f9b2c672aa167ff1c6525f653f47700f4 +size 20363 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_barh_units.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_barh_units.png new file mode 100644 index 0000000000000000000000000000000000000000..f5376b9c54aab22b7ab56d8c3512374fd1a484f8 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/jpl_barh_units.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc14c106b951aeab06d10fab70a1ef29ee82a7e104c241b8569077a303dc2d3f +size 14626 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_masked_units.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_masked_units.png new file mode 100644 index 0000000000000000000000000000000000000000..d8a674b8c14b327a8e7e87bdcc828afc76b2855a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_masked_units.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a1c02ed5a92f40e908cec4186caa3c3029f517c8d1d98fec6d267509cb3e5c7 +size 8955 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_pint.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_pint.png new file mode 100644 index 0000000000000000000000000000000000000000..0ff83a78845dc475f8cad9e7f3c44a5af788be6d --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_units/plot_pint.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a1ddbec807873c67354795c83a1bf52938ca31d5c721ee81a7c9c055cb668c9f +size 28590 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/eqnarray.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/eqnarray.png new file mode 100644 index 0000000000000000000000000000000000000000..afd35cb9386788953e77bb462e53f904ab13dd72 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/eqnarray.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bf7c227f58369253d6c765e10233f919de40cbf098f4f16fcf066e2ff212d5c6 +size 1322 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/rotation.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/rotation.png new file mode 100644 index 0000000000000000000000000000000000000000..5bf884624259f46fbf7a0f675a700b2fc3f88cb5 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/rotation.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6aa917e15ad5ad7446b092304c01b6e3f78014c0fc97a186867b083de3fbd38f +size 41784 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.pdf b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.pdf new file mode 100644 index 0000000000000000000000000000000000000000..d8fb27dbd63e5fbc2e1d8b4247105e34f6b80939 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.pdf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:82a992bfb2f4958ef8f28dc3469c6299377a971a89c29da7b506bdfd78337937 +size 125933 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.png new file mode 100644 index 0000000000000000000000000000000000000000..d692311999cd529993130d50693e594771b204ab --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_usetex/test_usetex.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6be0a95726f017071f7bc25dbe2d2e54c2f68671dd9dec9d26a8985b7e83646b +size 13512 diff --git a/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_widgets/check_radio_buttons.png b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_widgets/check_radio_buttons.png new file mode 100644 index 0000000000000000000000000000000000000000..1fb466bae164e3a3763bd54bffb57ac5abfb8b32 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/matplotlib/tests/baseline_images/test_widgets/check_radio_buttons.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8acfece49f5261b0f33b704c02659d95ba8f18e3c01e2ee4ae9fc365c888a378 +size 24315 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_artists.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_artists.png new file mode 100644 index 0000000000000000000000000000000000000000..9b39b74c581c4d41d9d766029c2ec71f8ec97cba --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_artists.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ad59ffbe8308d32190f814b9a2b1bf307d11b0d6cd5075c86b69e0669266301 +size 8380 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows.png new file mode 100644 index 0000000000000000000000000000000000000000..6b7fd195f4e1255f244a48c1e3c076f89ed5a9b3 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5cc646806efff64f7a6ca848d86fbe5c154aa5cb793b9a6c6c61f65af8f9600d +size 10784 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows_many_args.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows_many_args.png new file mode 100644 index 0000000000000000000000000000000000000000..03cde4fbe8b8f33eab0d7eb0b7612a96353e1016 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_direction_arrows_many_args.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e43ec76685de12d60f90c4c6f9e4ceb7880816982d86e32b5c2ce4fdb4c003d +size 11039 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_locator_base_call.png 
b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_locator_base_call.png new file mode 100644 index 0000000000000000000000000000000000000000..65936c0fb737b2cbf49a354315954ed5379b30e6 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/anchored_locator_base_call.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdbe13f6cb2ab55e7c1f3ea2c4fee3d544d48b62fa7e17e23b73fb16b842f8ec +size 2272 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/fill_facecolor.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/fill_facecolor.png new file mode 100644 index 0000000000000000000000000000000000000000..747993f3d3368e2f28c6b1e1fff289c999911aea --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/fill_facecolor.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4af297179bc66f12b9666340ad2a5d25fc9bd42c414ea4626ecbd9d82de328 +size 14845 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid.png new file mode 100644 index 0000000000000000000000000000000000000000..77cf471aaaf0f675e271120305f0ace10cf3de6a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c8838de66dd394c84598fa34350c4a46ec332a09c59b5f55dfda2b665b560be +size 3786 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_each_left_label_mode_all.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_each_left_label_mode_all.png new file mode 100644 index 0000000000000000000000000000000000000000..377b934045c47ac6b5e0bf16ed889997fc8fe9d7 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_each_left_label_mode_all.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea5177311b5a0d47e2a23ce215f9eb5461cba2b69b30919019e21bb86ee22316 +size 9986 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_single_bottom_label_mode_1.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_single_bottom_label_mode_1.png new file mode 100644 index 0000000000000000000000000000000000000000..03cce09f0123642258d0ed0e619194880f16839a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/image_grid_single_bottom_label_mode_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59b50bd0ae0127ae61cab8db1de5f81b21a70a627d6ab82f1a3b0f876cf97ef4 +size 5210 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/imagegrid_cbar_mode.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/imagegrid_cbar_mode.png new file mode 100644 index 
0000000000000000000000000000000000000000..273e7e5dbab3d359e68aa7d85d29968ed40ba18d --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/imagegrid_cbar_mode.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa299901f5355ea41b4b032febd5e7c1d62bb5c10b39f15133b1e80835613199 +size 33936 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_axes.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_axes.png new file mode 100644 index 0000000000000000000000000000000000000000..8252f78fec68aa57b8e9af7af328943f168ff50c --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_axes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:450991dfd13a56c92f97b1b82c89c789b5bd13554ad10802bc8fa10656fe1361 +size 9928 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_locator.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_locator.png new file mode 100644 index 0000000000000000000000000000000000000000..fb77922d26405d63c3db5c3e16ebc9ff27d6ddd0 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inset_locator.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d028a294ba853f119c194fff45cf37952ac2b051404e655e79838bd551b9c25 +size 9490 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/insetposition.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/insetposition.png new file mode 100644 index 0000000000000000000000000000000000000000..0ad0c15f5b5459bdd890a06969f1a5d3482be5d8 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/insetposition.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ab120853ff14c08029912e35ab45754fb4dc547d5f1e2cfaae100fcf5a06205 +size 1387 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inverted_zoomed_axes.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inverted_zoomed_axes.png new file mode 100644 index 0000000000000000000000000000000000000000..b3275da3e1e66e073bcc95e14cdd43cade07b18b --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/inverted_zoomed_axes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aef825b0b83c2a5f6313f26e913279077107a33b08609b1a600d0920e89c64bf +size 25997 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/rgb_axes.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/rgb_axes.png new file mode 100644 index 0000000000000000000000000000000000000000..089a4b7cb7d28864031a64ec4ede55b1b1e2f92f --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/rgb_axes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee03311de4e6c5f86d3785e0e2f516c1ad1822cdb77e6cf700925666b8067aee 
+size 6880 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/twin_axes_empty_and_removed.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/twin_axes_empty_and_removed.png new file mode 100644 index 0000000000000000000000000000000000000000..4e54a82bc9c039b75380d2a4e511b8d41445678a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/twin_axes_empty_and_removed.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d18ce4161c6ce12046fc51269d6075d1b5c8c65f5aabb589bde400a879b426b4 +size 37701 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/zoomed_axes.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/zoomed_axes.png new file mode 100644 index 0000000000000000000000000000000000000000..f08c3b3d5aee6db2372b78c3ae0f87a90362e65b --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axes_grid1/tests/baseline_images/test_axes_grid1/zoomed_axes.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:994bbccc9b73f0531bee1e65e027e9de8062f6368d4793a8c9a0e0c2aa40669e +size 25893 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist.png new file mode 100644 index 0000000000000000000000000000000000000000..745c2a20ffef0f3e0dd7a8fdec00f0080169ecce --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9d964f543d270200df5104e86836a2e626f9a45edf290aec2e42daf3e44f01b +size 10151 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_labelbase.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_labelbase.png new file mode 100644 index 0000000000000000000000000000000000000000..337508a3f7b3781a46fb68a570d54d5a3df388a8 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_labelbase.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:027e65a2fb6f022340d4d648f84f243a3e9e613aee40cab07f727ba57c1d9380 +size 10598 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticklabels.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticklabels.png new file mode 100644 index 0000000000000000000000000000000000000000..83ce5aa6eb37c06947745c58e46da1df2759470b --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticklabels.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eefb802a422a729809adcd8017ba2c95ff84fec0df4a50a8ca6c9cf3bbb8016b +size 5696 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticks.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticks.png new 
file mode 100644 index 0000000000000000000000000000000000000000..15dcb8e418f8473926e092f5f21b735daacc64f0 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axis_artist/axis_artist_ticks.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a456d0961b5dd5896d30d83b1bcdf5d2be81561d869aa9061e118a5b29b3a0f +size 5695 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/ParasiteAxesAuxTrans_meshplot.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/ParasiteAxesAuxTrans_meshplot.png new file mode 100644 index 0000000000000000000000000000000000000000..e9e8188131a09aa21e9a025a8ba7c142c71dc697 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/ParasiteAxesAuxTrans_meshplot.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f74483d88b0f0601bd03702ab94c32fd5d9eb8aa7c1bf9168b5e86e70f09bcac +size 34392 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/Subplot.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/Subplot.png new file mode 100644 index 0000000000000000000000000000000000000000..55f253f50bae6e7ab5b28e8ea5433f0e33e3b6cb --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/Subplot.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b51a580a3479cd491a7c0f390d5998dddb93128bb0099aba8c3c1217852c052f +size 26919 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/SubplotZero.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/SubplotZero.png new file mode 100644 index 0000000000000000000000000000000000000000..6f1949563c132028014ca3d3dab7a77c9dc6f845 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/SubplotZero.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de40abcfb1d0318acade20e06207fc9328a0c51b4818505b714cc93ed89787f1 +size 28682 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style.png new file mode 100644 index 0000000000000000000000000000000000000000..bd2dcbc44fee41d1ba69c6c77ed8db1f8f0dbd7a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fca5e665dc651690de1606814c855f4806895534235701e00596bec2dca6a775 +size 1323 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_size_color.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_size_color.png new file mode 100644 index 0000000000000000000000000000000000000000..8d8ee211788758326abb9ad9c5b00e5b1dc7bb19 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_size_color.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7e5a93d35b81864ab140bf99ac6aa3a5295607332b2d8fa33b75b01f83647726 +size 1521 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_tight.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_tight.png new file mode 100644 index 0000000000000000000000000000000000000000..9941b9b8597d461e9378eabc7f7a0158185929f6 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/axisline_style_tight.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c576810e48294f0e03e16eaedc88dd58f22b38ef611f540325ecfbc3de282e1d +size 1876 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/subplotzero_ylabel.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/subplotzero_ylabel.png new file mode 100644 index 0000000000000000000000000000000000000000..6de6f3f7807bef047f9928976df5cc26b60dba03 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_axislines/subplotzero_ylabel.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbea55663390288f61a147387982ffb61fc6dd4bd0e8d3e7d3d3b5fd3afedf2f +size 6872 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear3.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear3.png new file mode 100644 index 0000000000000000000000000000000000000000..646892caacef825651d61a30543abf3251aeb4bf --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2d87b63be3ff5857a5f6e51a895b43a9f560f319109cc45d647cda20920a331 +size 52835 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear4.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear4.png new file mode 100644 index 0000000000000000000000000000000000000000..91a391ac1c55e65eafb376d98205f5f23c579f04 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_floating_axes/curvelinear4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7188eb4a21fa32fa2bf95866c0d5205fb2dedff935aeeae977c2f9be14dfd7ab +size 29374 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/axis_direction.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/axis_direction.png new file mode 100644 index 0000000000000000000000000000000000000000..f6c4f2f33dd5ef8e0e5641ab84a6c4ff9d98905f --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/axis_direction.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ddfb9ef76760fa7b582345d7d27077d481698114f65778b3aa3ae62ef11d60de +size 40536 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/custom_transform.png 
b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/custom_transform.png new file mode 100644 index 0000000000000000000000000000000000000000..2638049c58f4526409818b0032c06e7d972d1762 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/custom_transform.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e1c4212052b5cfca0f515caf907359fe6f8609b8a49946d3be4bd8546cba538a +size 15118 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/polar_box.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/polar_box.png new file mode 100644 index 0000000000000000000000000000000000000000..5186922b55a2aca7a2133f1df30790b42cec6b4f --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/axisartist/tests/baseline_images/test_grid_helper_curvelinear/polar_box.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1668f33723bff8df94a4550a888206fc047ad604c5abb1257231941027a7c4e +size 62526 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_array.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_array.png new file mode 100644 index 0000000000000000000000000000000000000000..953864f32cc5afb4bbc4b8ba277897a50732d188 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_array.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:72f2225d5ad1b7d551ab85ab5f64803a73a95dff084f759898412132e95d1003 +size 60601 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_scalar.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_scalar.png new file mode 100644 index 0000000000000000000000000000000000000000..cd8b9fc29809d788147c0c5e03f1a34d8f36891a --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/add_collection3d_zs_scalar.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1842e5c7c9e7a66ea8feec82cd5275a7b0056e9806020472cb01a791ad6b0c05 +size 51719 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/arc_pathpatch.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/arc_pathpatch.png new file mode 100644 index 0000000000000000000000000000000000000000..bd6ed19d34c1146d64b1ce0a034febc9d9fac3d5 --- /dev/null +++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/arc_pathpatch.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:acbee76c1dce323e4fb1743959e50591c8b6ad6ce09c367272a7724166316df2 +size 49209 diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects.png new file mode 100644 index 0000000000000000000000000000000000000000..654d3c60c710cb7e4f67bee50e79c8d581aa169d --- /dev/null +++ 
b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7865eaff88e4d0d72a24e7cc9523035cdb180f29edbd49d6ec154022abd8a9ed
+size 90753
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects_adjust_box.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects_adjust_box.png
new file mode 100644
index 0000000000000000000000000000000000000000..0dfd533a00b6df217aa307a0ddf3080048b9b8b3
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/aspects_adjust_box.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:266ded92d034abfbcb8be2d8b4918d9342b5922fc5c81b56e9233c75c718b670
+size 81910
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_cla.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_cla.png
new file mode 100644
index 0000000000000000000000000000000000000000..9609f6f73f05de2d78fec8c037a68ee4791d97bd
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_cla.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da18568af33b3680961c31c18fc0fcc894fa48587d806870719605d4daafbefe
+size 56647
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_focal_length.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_focal_length.png
new file mode 100644
index 0000000000000000000000000000000000000000..a9d1938f2f5278482d5455276f9433962867f319
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_focal_length.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98c12104126ebf15c0b24331a4acd46d3ae7cab5e02b3aea09235aa719f601d3
+size 64443
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_isometric.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_isometric.png
new file mode 100644
index 0000000000000000000000000000000000000000..528f1460e5938f48347f3b561eb24864e2a9beb0
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_isometric.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ba0ad84d3e966cfef58c36cf05192d14fe74e5820882cb109a7a1834cde292d
+size 59242
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_labelpad.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_labelpad.png
new file mode 100644
index 0000000000000000000000000000000000000000..66d047b32f77aa3f93047a98024266ca55ad9159
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_labelpad.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7872704dca631f4d2beb8ef37856dd0ef5a580e165fa63ecf70aa1052c1365a6
+size 74765
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_ortho.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_ortho.png
new file mode 100644
index 0000000000000000000000000000000000000000..487c439eb05062777a279d0a1ddb09bc64816e8b
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_ortho.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf84b08ed72b94194ef51ff8ff19651469084a0b1946002acca00f1445eebd97
+size 47677
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_primary_views.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_primary_views.png
new file mode 100644
index 0000000000000000000000000000000000000000..d9dfc1ed0a79b8a64a768580b44af7303b9ce9b9
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_primary_views.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce05d1784cee203655103a85726ef8e8feac5c3894299d996de7ff0028d4bd10
+size 10427
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_rotated.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_rotated.png
new file mode 100644
index 0000000000000000000000000000000000000000..06d01f71caed4768d32e8136f4021879a08f5811
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axes3d_rotated.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5fe91e10839d5e73e5553ab99b6f81e2017a2be84c30c772a95d4aae3213ac3
+size 20905
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axis_positions.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axis_positions.png
new file mode 100644
index 0000000000000000000000000000000000000000..77d6e40a2c5f45975c991a2bff15952556bf5f2c
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/axis_positions.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ba03ab512e5ae132d9c9a40295976f9c04ee61493a80c2c525146a09f531948
+size 95870
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..ed3f8782f5a625b2c09d0b61507a73e05c33cb4c
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d1d259f7c4488e21248c4c577f6943506c1d545a4e123899cb7972e8fd215c6
+size 76603
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_notshaded.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_notshaded.png
new file mode 100644
index 0000000000000000000000000000000000000000..d4c3dadd4f344b4f8ea1019ce5d24feba19e0b95
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_notshaded.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58fb3871e13804eab0731a5159e962d8e6d8a3661bbc02b6e5b392653528e2c5
+size 36915
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_shaded.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_shaded.png
new file mode 100644
index 0000000000000000000000000000000000000000..d586b63f62ab92a20b4de4fb05979ed1abb114c2
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/bar3d_shaded.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4390cdb0a5fce23f8fa8db2af90d212ea96b4c059e07aaf53ff1d977374b8f26
+size 124130
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/computed_zorder.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/computed_zorder.png
new file mode 100644
index 0000000000000000000000000000000000000000..aef7c2cdd9fdfe020fb5640ae722c9cfc8d3456b
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/computed_zorder.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a542026bd44bfebbd6aa4562ce0b764c858b0875d80e007bc55efafc17d084a3
+size 39779
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..afb8c1b85408065950836fba2dba9a13b8f86dcb
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb0b9d5f2698db08ddfd75b7e10e01ae2b28542d84d5635abbd3b7e6b2c1aebc
+size 81169
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d_extend3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d_extend3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..dc2c295b8f3e3b7f2f9910d73e133f9a517f4c0a
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contour3d_extend3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:726595eb2babddd5d735de08bd19bb1f070c350af66dd1a205a221868a1e3f8b
+size 64164
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..759659ef8e439795ee031c79153c2a24bf467460
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68297f87232b1813b68565c0b02c26dcd13d74cfad01d32efe82f99d137ddded
+size 53435
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d_fill.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d_fill.png
new file mode 100644
index 0000000000000000000000000000000000000000..1a0d9a76295d00d8f843cfbbdc48f092f595ae4a
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/contourf3d_fill.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:74472f680126a86cd6bbb754f6d43c6797ddcb1aad45a661afad6b3bef86a02b
+size 48850
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/equal_box_aspect.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/equal_box_aspect.png
new file mode 100644
index 0000000000000000000000000000000000000000..7b5f27b326c5e7cfdf82e247e813ee809c405627
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/equal_box_aspect.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:557c2930198bf6cfbcb64e0a04306632774e0bbeca0f1b9063af1c8e38328b8d
+size 55467
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..80d6ed36ac5d0c9258e2af96398280c046bb0c34
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ade1e668586fdd2575a5b97c6987b3e4d6a9cc02489f7ab5f0ddb21087b8a804
+size 58282
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d_errorevery.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d_errorevery.png
new file mode 100644
index 0000000000000000000000000000000000000000..00a11e81cec43828e5fecf17fd5313a11ed2a38d
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/errorbar3d_errorevery.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03dc46ced255d3496b66e48c37e454eaa78f71eae79a69517df884115c0410bd
+size 65644
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/grid_off.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/grid_off.png
new file mode 100644
index 0000000000000000000000000000000000000000..d0c888cef5f8e7f21e6842d7a0371447703e90c7
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/grid_off.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03276ec3b9b746770a2eb3c9fdacd1d07b13d40b179e4590aee878c453dd1f75
+size 12463
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/invisible_ticks_axis.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/invisible_ticks_axis.png
new file mode 100644
index 0000000000000000000000000000000000000000..8266ad004b520deac86a7bd03dc6c00c7d9d797a
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/invisible_ticks_axis.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88372937ee288883a2b34564b460a9c7312f97116b2175be000447cc45a8ee43
+size 6753
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/lines3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/lines3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..88a3c80b016b26e7ff4fb21ca4aeea771f48447e
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/lines3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d838c1738c3bb2587c7e8e4bc0bf6a8eca07d821a207db5267859674f063cbd
+size 60212
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/minor_ticks.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/minor_ticks.png
new file mode 100644
index 0000000000000000000000000000000000000000..266e02465e8075008050d867358c81954e7cfbc2
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/minor_ticks.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9542f2b92afb5fdd4a566d62ad61cdce14c9371d480f255d735a41fec5ba6c0
+size 62299
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/mixedsubplot.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/mixedsubplot.png
new file mode 100644
index 0000000000000000000000000000000000000000..0b5e27ed1024bd0c5160e8e5cbbdde6c6d939ac1
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/mixedsubplot.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d7d7e11dc3f2f47be0cd77b0f4ff352131af11553053492feb3fa5d633ce832
+size 48446
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/panecolor_rcparams.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/panecolor_rcparams.png
new file mode 100644
index 0000000000000000000000000000000000000000..a3e4177517d7fa552db29bef9e3eb1d1f298f2e7
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/panecolor_rcparams.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd7d43e5ac2a521a2a3bb0285df9db6d0b0c79f573272018dd213257aacc0e53
+size 4552
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/plot_3d_from_2d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/plot_3d_from_2d.png
new file mode 100644
index 0000000000000000000000000000000000000000..7e1bf74e5191baf1db733bbe3f3acb7061638661
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/plot_3d_from_2d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a11edcb26059e4813d260d4caf3df0e5d77e2ed674b53bb04c9bb32f9da1dba
+size 53273
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_alpha.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_alpha.png
new file mode 100644
index 0000000000000000000000000000000000000000..7f50971ef45b14210dde4768fa1a37da306943c0
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_alpha.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88475b455e0aac771d7d931046548e8435163dfb795b2b7437ea171c3ffc7948
+size 57667
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_closed.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_closed.png
new file mode 100644
index 0000000000000000000000000000000000000000..7f50971ef45b14210dde4768fa1a37da306943c0
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/poly3dcollection_closed.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88475b455e0aac771d7d931046548e8435163dfb795b2b7437ea171c3ffc7948
+size 57667
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube.png
new file mode 100644
index 0000000000000000000000000000000000000000..8d0fa2223d4df3ad06da4e34e190fd4e454486df
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:009d04a1acaf741a32c2939459cc5b310d2807b713cdca161a01b2c76aa040c5
+size 23182
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube_ortho.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube_ortho.png
new file mode 100644
index 0000000000000000000000000000000000000000..8e4cf45bd8f062ce9912e7928869b9aa99a1577d
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/proj3d_axes_cube_ortho.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4f873ec172549c899a60e120eef9e510faffe29076c4a9179058275e1f2c109
+size 16210
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..830068098634fe67ccd88323ddd45b6e964f3484
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c113f19922b552791ee941d51e84f73c502e21ee1da7d8e7da9abfaeeed31853
+size 61315
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d_masked.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d_masked.png
new file mode 100644
index 0000000000000000000000000000000000000000..43817a90c5977e8396c36235f32f6cf71f5fe4c7
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/quiver3d_masked.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ad302f2fedae0d28fe89cca441dcbcbef3c3fdfec47eed3618d05ff9b24d21f
+size 77334
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..386441985876ca0578f0083ee9ce21c507d74b71
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6bf01d194cc420f8dbf2aad8ec9a0e2bed68be58600c02e3d2561bfef58d27b3
+size 41184
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_color.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_color.png
new file mode 100644
index 0000000000000000000000000000000000000000..d7d25c81acab57f37d0491df9cd1801667e09613
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_color.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29631684baf6d8183e692d5ca0f6e732b1f7742d66305993ddcf15086ee72902
+size 40740
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_linewidth.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_linewidth.png
new file mode 100644
index 0000000000000000000000000000000000000000..3b3a4dad0e916026355ce12160ae6c74d9eef805
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter3d_linewidth.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65a9ddf6638911aafe9aa141e800179dadff23d835f29a2b6661766638750f5c
+size 46027
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter_spiral.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter_spiral.png
new file mode 100644
index 0000000000000000000000000000000000000000..8b4ff7ec7fe56dda976b34c2848c1fd1a3d64549
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/scatter_spiral.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b47f07987fcbd8984ad8f8138f31ee524ebc660a78762b9f6fa4b45a69a3749
+size 99692
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/stem3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/stem3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..5c30092f5b36229f6788f63bcf523f1b22dfbd40
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/stem3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da7d410aba0d83b4a25eb061d59a09bb3138f02a38d8095519d3a2693e47c28d
+size 236147
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..c0173dbc93a9036512b7061e25b6dbaa74438782
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36304b9e84697ef098e6aed18f7ec0050e8f0856b2675bebebe6a2e6fdf86923
+size 50411
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_label_offset_tick_position.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_label_offset_tick_position.png
new file mode 100644
index 0000000000000000000000000000000000000000..04a1314e3e2aa45913cdf2adbcca425b7f394a76
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_label_offset_tick_position.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9fbe8fa2e3308182d81017a48caf315eca6cdef71b23431088f76d1246a402f
+size 101663
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked.png
new file mode 100644
index 0000000000000000000000000000000000000000..97b8cd5dd02d71630a37a1894165d173c77a28d3
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc5a303aef7f4193bf477023364f7fccdf025050d3992f2067f67d87c9e358ce
+size 41837
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked_strides.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked_strides.png
new file mode 100644
index 0000000000000000000000000000000000000000..92971f0b9f7ab4d81995ae0c4fc7a677bb0a94dd
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_masked_strides.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0581bb24841712a1533985ff38b908af86ce7df5e284d1d7dce61d493bdba2bb
+size 62394
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_shaded.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_shaded.png
new file mode 100644
index 0000000000000000000000000000000000000000..e398c9f7e0d4f8f2a6b6b60067ae285742b4ac36
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_shaded.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f671135051c34a7df8f2ea5261833839fe40b5acb7aa39447cd9131c7d61478e
+size 35638
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_zsort_inf.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_zsort_inf.png
new file mode 100644
index 0000000000000000000000000000000000000000..052a1cd2c544621b171eef0a0d1f6a850b175d90
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/surface3d_zsort_inf.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:419598e1226a9495323c125735760b541d6dc5df3ed93e7ae3d01a6182ce4902
+size 89421
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/text3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/text3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..4ceac972cec440cd8b02faedb6f8615a4dadbe69
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/text3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e4d193840079e71722f65f84c33b805773a3dd0e0ce995e453f66ebca67fc0d
+size 75710
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/tricontour.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/tricontour.png
new file mode 100644
index 0000000000000000000000000000000000000000..70454ec263b759b0c42b4a7c331bc5e8bab05a6d
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/tricontour.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68900735ffb0006ea8e76530d29d128f0c1a7904ba47b9ad12704da667140972
+size 55114
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..e00a9b122b3f00ee13fc28a849dc49e9e6943fcb
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f28f5c18bde9822ec19d434d3260f42f7a691751a4ed728dbfc5cfbfe762ae4f
+size 69781
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d_shaded.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d_shaded.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbffbf108265549cadf0b8baf5ac03f2fae52399
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/trisurf3d_shaded.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c6620cc0bf9862f4ec847af453d2e4c0ecb03c53d480f930f999fe6807eca3a
+size 68460
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-alpha.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-alpha.png
new file mode 100644
index 0000000000000000000000000000000000000000..6d2810eec4ca72ea0598ebdd1862728d6090c77e
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-alpha.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17f5b9ba23559ba29a2f26bc4b347d4bfc0078adeac59160f57bf17dc410ec0d
+size 83656
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-edge-style.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-edge-style.png
new file mode 100644
index 0000000000000000000000000000000000000000..e9b77692f70c5ceaa9086c8c50c101db1e583ffd
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-edge-style.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50faf0bb24eb71c4b5445a3fc88d6d6d6f76a0d84f382cb6edbb3bbc518a3836
+size 66447
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-named-colors.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-named-colors.png
new file mode 100644
index 0000000000000000000000000000000000000000..28112247d6fd07d07d3d4106261133d8a78398cb
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-named-colors.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd2043cd86336058e1a14c706e0bbb522299fc39f674883621d8b50b87cdc908
+size 59278
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-rgb-data.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-rgb-data.png
new file mode 100644
index 0000000000000000000000000000000000000000..b552d111536557f834f2b7b5ec09a91fd45d01c2
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-rgb-data.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d5699497ca781b553bfae75f5500166c9ff3f5a2b5b6e3475a18bcf8ae88686
+size 64292
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-simple.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-simple.png
new file mode 100644
index 0000000000000000000000000000000000000000..2eddbf4f25bb2f42b02d3d4efea4df83ec4f6c75
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-simple.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3a31d0d9b8191e8dfbf53bf533356c95ff11f57e854e7493cbff4f812be2afc
+size 43374
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-xyz.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-xyz.png
new file mode 100644
index 0000000000000000000000000000000000000000..5811c5e1d985a818abcb6499170caad72dd9e1fe
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/voxels-xyz.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b993d1b52a4ca4273f6bdd7ed135e101b045073edfcbef12384818878dedff41
+size 94388
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3d.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3d.png
new file mode 100644
index 0000000000000000000000000000000000000000..b99f2046bf84fe77cca1044bb0c4652818c37ed3
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3d.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:026e8b7bb4bc534aadbfb0fa5471c696a401d3d02225411d766457a85b525bde
+size 90091
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerocstride.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerocstride.png
new file mode 100644
index 0000000000000000000000000000000000000000..560f3730218869140a92eee631d00edfaff0edaa
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerocstride.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:afccfc7c4d60bf0e3ba1194bccfa670c2ff385bf15f0f972c6e5740eba672c9f
+size 77750
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerorstride.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerorstride.png
new file mode 100644
index 0000000000000000000000000000000000000000..f7464c1942365bd81f77a3c2068ed879101e2d2d
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_axes3d/wireframe3dzerorstride.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba745e37827e60c0cd241013fd3b15b63d79a3af58c9ca5edb0aef20e61cce3a
+size 78264
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/fancy.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/fancy.png
new file mode 100644
index 0000000000000000000000000000000000000000..d1d06999306eea24bdc2beb7ba6238f58fc06acf
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/fancy.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d1ddc7abde47af8e780f4b74df7613928f7be4859822626db6664d37c4d7eb9
+size 58706
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_bar.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_bar.png
new file mode 100644
index 0000000000000000000000000000000000000000..9c2c6b68a39a6cad0e6de02dffef08d5fdd57238
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_bar.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f8404f5142f3e67316f9e0b44d360ff4c2eb5a7fc67b1d3f53523f0c72ce074
+size 53492
diff --git a/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_plot.png b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_plot.png
new file mode 100644
index 0000000000000000000000000000000000000000..189277db0df45b745a280d413776367827bdfa6b
--- /dev/null
+++ b/testbed/matplotlib__matplotlib/lib/mpl_toolkits/mplot3d/tests/baseline_images/test_legend3d/legend_plot.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d26f784d4fd4b3b438de13d3008ad056e4cc4c766db1ad0bdee10c5b605a7158
+size 46476
diff --git a/testbed/mwaskom__seaborn/doc/_static/favicon_old.ico b/testbed/mwaskom__seaborn/doc/_static/favicon_old.ico
new file mode 100644
index 0000000000000000000000000000000000000000..41b638500601d1d4462593859b1f846a88c50c83
--- /dev/null
+++ b/testbed/mwaskom__seaborn/doc/_static/favicon_old.ico
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2fe93dc4f3da1f38d492fe84eb2e28e0bf802168a00b467e972138abc26f28be
+size 270398
diff --git a/testbed/mwaskom__seaborn/doc/_static/logo-tall-whitebg.png b/testbed/mwaskom__seaborn/doc/_static/logo-tall-whitebg.png
new file mode 100644
index 0000000000000000000000000000000000000000..ad870b8499378120f9fc33e8a55b1508c0e9a9a7
--- /dev/null
+++ b/testbed/mwaskom__seaborn/doc/_static/logo-tall-whitebg.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff860a9fd71f53bdac2e00e26eacd431784d353810a5054f3c61292a29cce57c
+size 90210