chore: remove dumprelated.py and loadrelated.py (#8412)

* fix: correctly disconnect post_save signal

* chore: remove dumprelated.py and loadrelated.py

These have not been used in quite some time and would need
a careful review before they could be trusted. Taking them
out to avoid accidents.
Jennifer Richards 2025-01-10 19:02:47 -04:00 committed by GitHub
parent e108a3ca2c
commit e1af5e7049
2 changed files with 0 additions and 338 deletions


dumprelated.py
@@ -1,209 +0,0 @@
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-


import io
import warnings

from collections import OrderedDict

from django.apps import apps
from django.contrib.admin.utils import NestedObjects
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import parse_apps_and_model_labels
from django.db import DEFAULT_DB_ALIAS, router

import debug                            # pyflakes:ignore
debug.debug = True


class ProxyModelWarning(Warning):
    pass


class Command(BaseCommand):
    help = (
        "Output a database object and its related objects as a fixture of the given format "
    )

    def add_arguments(self, parser):
        parser.add_argument(
            'args', metavar='app_label.ModelName', nargs=1,
            help='Specifies the app_label.ModelName for which to dump objects given by --pks',
        )
        parser.add_argument(
            '--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.',
        )
        parser.add_argument(
            '--indent', default=None, dest='indent', type=int,
            help='Specifies the indent level to use when pretty-printing output.',
        )
        parser.add_argument(
            '--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a specific database to dump fixtures from. '
                 'Defaults to the "default" database.',
        )
        parser.add_argument(
            '-e', '--exclude', dest='exclude', action='append', default=[],
            help='An app_label or app_label.ModelName to exclude '
                 '(use multiple --exclude to exclude multiple apps/models).',
        )
        parser.add_argument(
            '--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
            help='Use natural foreign keys if they are available.',
        )
        parser.add_argument(
            '--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
            help='Use natural primary keys if they are available.',
        )
        parser.add_argument(
            '-o', '--output', default=None, dest='output',
            help='Specifies file to which the output is written.'
        )
        parser.add_argument(
            '--pks', dest='primary_keys', required=True,
            help="Only dump objects with given primary keys. Accepts a comma-separated "
                 "list of keys. This option only works when you specify one model.",
        )

    def handle(self, *app_labels, **options):
        format = options['format']
        indent = options['indent']
        using = options['database']
        excludes = options['exclude']
        output = options['output']
        show_traceback = options['traceback']
        use_natural_foreign_keys = options['use_natural_foreign_keys']
        use_natural_primary_keys = options['use_natural_primary_keys']
        pks = options['primary_keys']
        if pks:
            primary_keys = [pk.strip() for pk in pks.split(',')]
        else:
            primary_keys = []

        excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)

        if len(app_labels) == 0:
            if primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = OrderedDict(
                (app_config, None) for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config not in excluded_apps
            )
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = OrderedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    try:
                        model = app_config.get_model(model_label)
                    except LookupError:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))

                    app_list_value = app_list.setdefault(app_config, [])

                    # We may have previously seen an "all-models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need adding specific models to the list.
                    if app_list_value is not None:
                        if model not in app_list_value:
                            app_list_value.append(model)
                except ValueError:
                    if primary_keys:
                        raise CommandError("You can only use --pks option with one model")
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    app_list[app_config] = None

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass
            raise CommandError("Unknown serialization format: %s" % format)

        def flatten(l):
            if isinstance(l, list):
                for el in l:
                    if isinstance(el, list):
                        for sub in flatten(el):
                            yield sub
                    else:
                        yield el
            else:
                yield l

        def get_objects(count_only=False):
            """
            Collate the objects to be serialized. If count_only is True, just
            count the number of objects to be serialized.
            """
            models = serializers.sort_dependencies(list(app_list.items()))
            for model in models:
                if model in excluded_models:
                    continue
                if model._meta.proxy and model._meta.proxy_for_model not in models:
                    warnings.warn(
                        "%s is a proxy model and won't be serialized." % model._meta.label,
                        category=ProxyModelWarning,
                    )
                if not model._meta.proxy and router.allow_migrate_model(using, model):
                    objects = model._default_manager
                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    if count_only:
                        yield queryset.order_by().count()
                    else:
                        for obj in queryset.iterator():
                            collector = NestedObjects(using=using)
                            collector.collect([obj, ])
                            object_list = list(flatten(collector.nested()))
                            object_list.reverse()
                            for o in object_list:
                                yield o

        try:
            self.stdout.ending = None
            progress_output = None
            object_count = 0
            # If dumpdata is outputting to stdout, there is no way to display progress
            if (output and self.stdout.isatty() and options['verbosity'] > 0):
                progress_output = self.stdout
                object_count = sum(get_objects(count_only=True))
            stream = io.open(output, 'w') if output else None
            try:
                serializers.serialize(
                    format, get_objects(), indent=indent,
                    use_natural_foreign_keys=use_natural_foreign_keys,
                    use_natural_primary_keys=use_natural_primary_keys,
                    stream=stream or self.stdout, progress_output=progress_output,
                    object_count=object_count,
                )
            finally:
                if stream:
                    stream.close()
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)


loadrelated.py
@@ -1,129 +0,0 @@
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-


import gzip
import os
#import sys
import tqdm
import zipfile

try:
    import bz2
    has_bz2 = True
except ImportError:
    has_bz2 = False

from django.core.exceptions import ObjectDoesNotExist
from django.core import serializers
from django.db import DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections
from django.db.models.signals import post_save
from django.utils.encoding import force_str

import django.core.management.commands.loaddata as loaddata

import debug                            # pyflakes:ignore

from ietf.community.signals import notify_of_events_receiver


class Command(loaddata.Command):
    help = ("""
        Load a fixture of related objects to the database. The fixture is expected
        to contain a set of related objects, created with the 'dumprelated' management
        command. It differs from the 'loaddata' command in that it silently ignores
        attempts to load duplicate entries, and continues loading subsequent entries.
        """)

    def add_arguments(self, parser):
        parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture files.')
        parser.add_argument(
            '--database', action='store', dest='database', default=DEFAULT_DB_ALIAS,
            help='Nominates a specific database to load fixtures into. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--ignorenonexistent', '-i', action='store_true', dest='ignore', default=False,
            help='Ignores entries in the serialized data for fields that do not '
                 'currently exist on the model.',
        )

    def handle(self, *args, **options):
        self.ignore = options['ignore']
        self.using = options['database']
        self.verbosity = options['verbosity']
        #
        self.compression_formats = {
            None: (open, 'rb'),
            'gz': (gzip.GzipFile, 'rb'),
            'zip': (SingleZipReader, 'r'),
        }
        if has_bz2:
            self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
        #
        self.serialization_formats = serializers.get_public_serializer_formats()
        #
        post_save.disconnect(notify_of_events_receiver())
        #
        connection = connections[self.using]
        self.fixture_count = 0
        self.loaded_object_count = 0
        self.fixture_object_count = 0
        #
        for arg in args:
            fixture_file = arg
            self.stdout.write("Loading objects from %s" % fixture_file)
            _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
            open_method, mode = self.compression_formats[cmp_fmt]
            fixture = open_method(fixture_file, mode)
            objects_in_fixture = 0
            self.stdout.write("Getting object count...\b\b\b", ending='')
            self.stdout.flush()
            for o in serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore, ):
                objects_in_fixture += 1
            self.stdout.write(" %d" % objects_in_fixture)
            #
            fixture = open_method(fixture_file, mode)
            self.fixture_count += 1
            objects = serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore, )
            with connection.constraint_checks_disabled():
                for obj in tqdm.tqdm(objects, total=objects_in_fixture):
                    try:
                        obj.save(using=self.using)
                        self.loaded_object_count += 1
                    except (DatabaseError, IntegrityError, ObjectDoesNotExist, AttributeError) as e:
                        error_msg = force_str(e)
                        if "Duplicate entry" in error_msg:
                            pass
                        else:
                            self.stderr.write("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                'app_label': obj.object._meta.app_label,
                                'object_name': obj.object._meta.object_name,
                                'pk': obj.object.pk,
                                'error_msg': error_msg,
                            }, )
            self.fixture_object_count += objects_in_fixture

        if self.verbosity >= 1:
            if self.fixture_object_count == self.loaded_object_count:
                self.stdout.write(
                    "Installed %d object(s) from %d fixture(s)"
                    % (self.loaded_object_count, self.fixture_count)
                )
            else:
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)"
                    % (self.loaded_object_count, self.fixture_object_count, self.fixture_count)
                )


class SingleZipReader(zipfile.ZipFile):
    def __init__(self, *args, **kwargs):
        zipfile.ZipFile.__init__(self, *args, **kwargs)
        if len(self.namelist()) != 1:
            raise ValueError("Zip-compressed fixtures must contain one file.")

    def read(self):
        return zipfile.ZipFile.read(self, self.namelist()[0])
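
A fixture written with dumprelated could then be reloaded with the removed loadrelated command, which behaves like loaddata except that duplicate entries are silently skipped. A minimal sketch, reusing the hypothetical file name from the earlier example:

# Minimal sketch (hypothetical file name): reload a dumprelated fixture, skipping duplicates.
from django.core.management import call_command

call_command(
    "loadrelated",
    "document-1.json",  # fixture file produced by dumprelated (illustrative)
    ignore=True,        # -i/--ignorenonexistent, dest "ignore": skip fields that no longer exist
)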