Clear the cache keys more aggressively when a document is highly likely to be changing
commit ea343b1ca4 (parent c1b5a6598c)
@@ -189,13 +189,21 @@ def refresh_metadata_cache(
     cache.touch(doc_key, timeout)


-def clear_metadata_cache(document_id: int) -> None:
-    doc_key = get_metadata_cache_key(document_id)
-    cache.delete(doc_key)
-
-
 def get_thumbnail_modified_key(document_id: int) -> str:
     """
     Builds the key to store a thumbnail's timestamp
     """
     return f"doc_{document_id}_thumbnail_modified"
+
+
+def clear_document_caches(document_id: int) -> None:
+    """
+    Removes all cached items for the given document
+    """
+    cache.delete_many(
+        [
+            get_suggestion_cache_key(document_id),
+            get_metadata_cache_key(document_id),
+            get_thumbnail_modified_key(document_id),
+        ],
+    )
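For context, a minimal sketch of how the thumbnail timestamp key could be written and read with Django's cache API, so that deleting it in clear_document_caches forces the next request to look at the thumbnail file again. Only get_thumbnail_modified_key and its module path come from this commit; the two wrapper functions and the timeout constant below are illustrative assumptions, not project code.

from django.core.cache import cache

from documents.caching import get_thumbnail_modified_key

CACHE_ONE_HOUR = 60 * 60  # hypothetical timeout, not a project constant


def remember_thumbnail_modified(document_id: int, mtime: float) -> None:
    # Cache the thumbnail file's mtime so later requests can answer
    # conditional checks without touching the filesystem every time.
    cache.set(get_thumbnail_modified_key(document_id), mtime, CACHE_ONE_HOUR)


def thumbnail_modified(document_id: int) -> float | None:
    # Returns None once clear_document_caches(document_id) has deleted the key,
    # which makes the caller fall back to reading the file again.
    return cache.get(get_thumbnail_modified_key(document_id))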
@@ -23,7 +23,7 @@ from filelock import FileLock
 from guardian.shortcuts import remove_perm

 from documents import matching
-from documents.caching import clear_metadata_cache
+from documents.caching import clear_document_caches
 from documents.classifier import DocumentClassifier
 from documents.consumer import parse_doc_title_w_placeholders
 from documents.file_handling import create_source_path_directory
@@ -439,7 +439,8 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
                 archive_filename=instance.archive_filename,
                 modified=timezone.now(),
             )
-            clear_metadata_cache(instance.pk)
+            # Clear any caching for this document. Slightly overkill, but not terrible
+            clear_document_caches(instance.pk)

         except (OSError, DatabaseError, CannotMoveFilesException) as e:
             logger.warning(f"Exception during file handling: {e}")
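A quick aside on the "Slightly overkill, but not terrible" comment: Django's cache.delete_many() silently ignores keys that were never set, so widening the clear from just the metadata key to every document key on each rename is essentially free. A small runnable illustration, assuming the locmem backend; the literal key strings are made up for the demo (only the thumbnail key pattern appears in this commit).

from django.conf import settings

settings.configure(
    CACHES={"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}},
)

from django.core.cache import cache

# Pretend only the metadata entry was ever populated for document 1.
cache.set("doc_1_metadata", {"pages": 3}, 60)

cache.delete_many(
    [
        "doc_1_suggestions",          # never set: silently ignored
        "doc_1_metadata",             # removed
        "doc_1_thumbnail_modified",   # never set: silently ignored
    ],
)

assert cache.get("doc_1_metadata") is None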
@@ -18,6 +18,7 @@ from whoosh.writing import AsyncWriter
 from documents import index
 from documents import sanity_checker
 from documents.barcodes import BarcodePlugin
+from documents.caching import clear_document_caches
 from documents.classifier import DocumentClassifier
 from documents.classifier import load_classifier
 from documents.consumer import Consumer
@@ -213,6 +214,7 @@ def bulk_update_documents(document_ids):
     ix = index.open_index()

     for doc in documents:
+        clear_document_caches(doc.pk)
         document_updated.send(
             sender=None,
             document=doc,
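A hypothetical variant, not part of this commit: since each per-document clear boils down to a delete_many() of three keys, a bulk path could also gather every key up front and hit the cache backend once. The key helpers below are those shown in the caching hunk above; the bulk helper itself and its name are invented for illustration.

from django.core.cache import cache

from documents.caching import (
    get_metadata_cache_key,
    get_suggestion_cache_key,
    get_thumbnail_modified_key,
)


def clear_document_caches_bulk(document_ids: list[int]) -> None:
    # Collect every cache key for every document, then clear them in one call
    # instead of one delete_many per document.
    keys_to_clear = []
    for document_id in document_ids:
        keys_to_clear.extend(
            [
                get_suggestion_cache_key(document_id),
                get_metadata_cache_key(document_id),
                get_thumbnail_modified_key(document_id),
            ],
        )
    cache.delete_many(keys_to_clear)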
@@ -305,6 +307,8 @@ def update_document_archive_file(document_id):
             with index.open_index_writer() as writer:
                 index.update_document(writer, document)

+            clear_document_caches(document.pk)
+
     except Exception:
         logger.exception(
             f"Error while parsing document {document} (ID: {document_id})",
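Finally, a sketch of how the new behaviour could be checked in a test, assuming Django's cache and the helpers from documents.caching; the document id and cached values are invented for illustration.

from django.core.cache import cache

from documents.caching import (
    clear_document_caches,
    get_metadata_cache_key,
    get_suggestion_cache_key,
    get_thumbnail_modified_key,
)


def test_clear_document_caches_removes_all_keys():
    doc_id = 123  # hypothetical document id
    cache.set(get_suggestion_cache_key(doc_id), ["tag: invoices"], 60)
    cache.set(get_metadata_cache_key(doc_id), {"pages": 3}, 60)
    cache.set(get_thumbnail_modified_key(doc_id), 1700000000.0, 60)

    # What the rename handler and the archive/bulk-update tasks now do per document:
    clear_document_caches(doc_id)

    assert cache.get(get_suggestion_cache_key(doc_id)) is None
    assert cache.get(get_metadata_cache_key(doc_id)) is None
    assert cache.get(get_thumbnail_modified_key(doc_id)) is None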