mirror of
https://github.com/netbox-community/netbox.git
synced 2025-08-24 16:26:09 -06:00
Performance improvements for reindexing
This commit is contained in: parent 6eb2983ccd, commit 530f5180b8.
@ -52,6 +52,7 @@ class Command(BaseCommand):
|
||||
|
||||
# Clear all cached values for the specified models
|
||||
self.stdout.write('Clearing cached values... ', ending='')
|
||||
self.stdout.flush()
|
||||
content_types = [
|
||||
ContentType.objects.get_for_model(model) for model in indexers.keys()
|
||||
]
|
||||
@ -59,11 +60,13 @@ class Command(BaseCommand):
|
||||
self.stdout.write(f'{deleted_count} entries deleted.')
|
||||
|
||||
# Index models
|
||||
self.stdout.write('Indexing models')
|
||||
for model, idx in indexers.items():
|
||||
app_label = model._meta.app_label
|
||||
model_name = model._meta.model_name
|
||||
self.stdout.write(f'Reindexing {app_label}.{model_name}... ', ending='')
|
||||
i = search_backend.cache(model.objects.iterator())
|
||||
self.stdout.write(f' {app_label}.{model_name}... ', ending='')
|
||||
self.stdout.flush()
|
||||
i = search_backend.cache(model.objects.iterator(), remove_existing=False)
|
||||
if i:
|
||||
self.stdout.write(f'{i} entries cached.')
|
||||
else:
|
||||
@ -72,4 +75,4 @@ class Command(BaseCommand):
|
||||
msg = f'Completed.'
|
||||
if total_count := search_backend.size:
|
||||
msg += f' Total entries: {total_count}'
|
||||
self.stdout.write(msg)
|
||||
self.stdout.write(msg, self.style.SUCCESS)
|
||||
|
@ -156,20 +156,24 @@ class CachedValueSearchBackend(SearchBackend):
|
||||
counter = 0
|
||||
for instance in instances:
|
||||
|
||||
# First item
|
||||
if not counter:
|
||||
|
||||
# Determine the indexer
|
||||
if indexer is None:
|
||||
try:
|
||||
indexer = get_indexer(instance)
|
||||
except KeyError:
|
||||
break
|
||||
|
||||
# Prefetch any associated custom fields
|
||||
content_type = ContentType.objects.get_for_model(indexer.model)
|
||||
custom_fields = CustomField.objects.filter(content_types=content_type).exclude(search_weight=0)
|
||||
|
||||
# Wipe out any previously cached values for the object
|
||||
if remove_existing:
|
||||
cls.remove(instance)
|
||||
|
||||
# Determine the indexer
|
||||
if indexer is None:
|
||||
try:
|
||||
indexer = get_indexer(instance)
|
||||
content_type = ContentType.objects.get_for_model(indexer.model)
|
||||
custom_fields = CustomField.objects.filter(content_types=content_type).exclude(search_weight=0)
|
||||
except KeyError:
|
||||
# No indexer has been registered for this model
|
||||
continue
|
||||
|
||||
# Generate cache data
|
||||
for field in indexer.to_cache(instance, custom_fields=custom_fields):
|
||||
buffer.append(
|
||||
@ -203,15 +207,19 @@ class CachedValueSearchBackend(SearchBackend):
|
||||
return
|
||||
|
||||
ct = ContentType.objects.get_for_model(instance)
|
||||
CachedValue.objects.filter(object_type=ct, object_id=instance.pk).delete()
|
||||
qs = CachedValue.objects.filter(object_type=ct, object_id=instance.pk)
|
||||
|
||||
# Call _raw_delete() on the queryset to avoid first loading instances into memory
|
||||
return qs._raw_delete(using=qs.db)
|
||||
|
||||
@classmethod
def clear(cls, object_types=None):
    """
    Delete all cached search values, optionally limited to specific object types.

    :param object_types: Optional iterable of ContentTypes; when given, only
        cached values for those types are removed. When falsy, all entries
        are removed.
    :return: The number of rows deleted.
    """
    # NOTE: the merged diff showed stale pre-commit lines (an early
    # `return del_count` made the filter and _raw_delete unreachable);
    # this is the coherent post-commit implementation.
    qs = CachedValue.objects.all()
    if object_types:
        qs = qs.filter(object_type__in=object_types)

    # Call _raw_delete() on the queryset to avoid first loading instances
    # into memory (skips signals and cascades, which CachedValue does not need)
    return qs._raw_delete(using=qs.db)
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
|
Loading…
Reference in New Issue
Block a user