Merge branch 'dev' into pr/5190
@@ -14,6 +14,7 @@ from PIL import Image
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.models import Tag
from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import StopConsumeTaskError
from documents.plugins.helpers import ProgressStatusOptions
@@ -65,7 +66,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
        supported_mimes = {"application/pdf"}

        return (
            settings.CONSUMER_ENABLE_ASN_BARCODE or settings.CONSUMER_ENABLE_BARCODES
            settings.CONSUMER_ENABLE_ASN_BARCODE
            or settings.CONSUMER_ENABLE_BARCODES
            or settings.CONSUMER_ENABLE_TAG_BARCODE
        ) and self.input_doc.mime_type in supported_mimes

    def setup(self):
@@ -90,6 +93,16 @@ class BarcodePlugin(ConsumeTaskPlugin):
            logger.info(f"Found ASN in barcode: {located_asn}")
            self.metadata.asn = located_asn

        # try reading tags from barcodes
        if settings.CONSUMER_ENABLE_TAG_BARCODE:
            tags = self.tags
            if tags is not None and len(tags) > 0:
                if self.metadata.tag_ids:
                    self.metadata.tag_ids += tags
                else:
                    self.metadata.tag_ids = tags
                logger.info(f"Found tags in barcode: {tags}")

        separator_pages = self.get_separation_pages()
        if not separator_pages:
            return "No pages to split on!"
@@ -279,6 +292,53 @@ class BarcodePlugin(ConsumeTaskPlugin):

        return asn

    @property
    def tags(self) -> Optional[list[int]]:
        """
        Search the parsed barcodes for any tags.
        Returns the detected tag ids (or empty list)
        """
        tags = []

        # Ensure the barcodes have been read
        self.detect()

        for x in self.barcodes:
            tag_texts = x.value

            for raw in tag_texts.split(","):
                try:
                    tag = None
                    for regex in settings.CONSUMER_TAG_BARCODE_MAPPING:
                        if re.match(regex, raw, flags=re.IGNORECASE):
                            sub = settings.CONSUMER_TAG_BARCODE_MAPPING[regex]
                            tag = (
                                re.sub(regex, sub, raw, flags=re.IGNORECASE)
                                if sub
                                else raw
                            )
                            break

                    if tag:
                        tag = Tag.objects.get_or_create(
                            name__iexact=tag,
                            defaults={"name": tag},
                        )[0]

                        logger.debug(
                            f"Found Tag Barcode '{raw}', substituted "
                            f"to '{tag}' and mapped to "
                            f"tag #{tag.pk}.",
                        )
                        tags.append(tag.pk)

                except Exception as e:
                    logger.error(
                        f"Failed to find or create TAG '{raw}' because: {e}",
                    )

        return tags
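
For illustration only, a minimal standalone sketch of the mapping step the tags property above performs, assuming the default mapping {"TAG:(.*)": "\\g<1>"} that the settings hunk further down reads from PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING; map_barcode_to_tag_names is a hypothetical helper name used here for clarity, not part of this change:

import re

# assumed default mapping: strip a leading "TAG:" prefix, keep the rest as the tag name
MAPPING = {"TAG:(.*)": "\\g<1>"}

def map_barcode_to_tag_names(barcode_value: str) -> list[str]:
    """Split a barcode value on commas and apply the first matching regex mapping."""
    names = []
    for raw in barcode_value.split(","):
        for regex, sub in MAPPING.items():
            if re.match(regex, raw, flags=re.IGNORECASE):
                # substitute if a replacement pattern is given, otherwise keep the raw text
                names.append(re.sub(regex, sub, raw, flags=re.IGNORECASE) if sub else raw)
                break
    return names

print(map_barcode_to_tag_names("TAG:Invoice,TAG:2024,unrelated"))  # ['Invoice', '2024']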

    def get_separation_pages(self) -> dict[int, bool]:
        """
        Search the parsed barcodes for separators and returns a dict of page
@@ -90,7 +90,6 @@ def set_suggestions_cache(
    """
    if classifier is not None:
        doc_key = get_suggestion_cache_key(document_id)
        print(classifier.last_auto_type_hash)
        cache.set(
            doc_key,
            SuggestionCacheData(
@@ -4,11 +4,14 @@ import pickle
import re
import warnings
from collections.abc import Iterator
from datetime import datetime
from hashlib import sha256
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Optional

if TYPE_CHECKING:
    from datetime import datetime
    from pathlib import Path

from django.conf import settings
from django.core.cache import cache
from sklearn.exceptions import InconsistentVersionWarning
@@ -69,8 +69,6 @@ class Command(ProgressBarMixin, BaseCommand):

    def handle(self, *args, **options):
        self.handle_progress_bar_mixin(**options)
        # Detect if we support color
        color = self.style.ERROR("test") != "test"

        if options["inbox_only"]:
            queryset = Document.objects.filter(tags__is_inbox_tag=True)
@@ -96,7 +94,8 @@ class Command(ProgressBarMixin, BaseCommand):
                use_first=options["use_first"],
                suggest=options["suggest"],
                base_url=options["base_url"],
                color=color,
                stdout=self.stdout,
                style_func=self.style,
            )

        if options["document_type"]:
@@ -108,7 +107,8 @@ class Command(ProgressBarMixin, BaseCommand):
                use_first=options["use_first"],
                suggest=options["suggest"],
                base_url=options["base_url"],
                color=color,
                stdout=self.stdout,
                style_func=self.style,
            )

        if options["tags"]:
@@ -119,7 +119,8 @@ class Command(ProgressBarMixin, BaseCommand):
                replace=options["overwrite"],
                suggest=options["suggest"],
                base_url=options["base_url"],
                color=color,
                stdout=self.stdout,
                style_func=self.style,
            )
        if options["storage_path"]:
            set_storage_path(
@@ -130,5 +131,6 @@ class Command(ProgressBarMixin, BaseCommand):
                use_first=options["use_first"],
                suggest=options["suggest"],
                base_url=options["base_url"],
                color=color,
                stdout=self.stdout,
                style_func=self.style,
            )

@@ -19,7 +19,7 @@ def _process_document(doc_id):
    if parser_class:
        parser = parser_class(logging_group=None)
    else:
        print(f"{document} No parser for mime type {document.mime_type}")
        print(f"{document} No parser for mime type {document.mime_type}")  # noqa: T201
        return

    try:

@@ -5,7 +5,9 @@ from typing import Union

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from channels_redis.pubsub import RedisPubSubChannelLayer

if TYPE_CHECKING:
    from channels_redis.pubsub import RedisPubSubChannelLayer


class ProgressStatusOptions(str, enum.Enum):

@@ -18,7 +18,6 @@ from django.db import close_old_connections
from django.db import models
from django.db.models import Q
from django.dispatch import receiver
from django.utils import termcolors
from django.utils import timezone
from filelock import FileLock
@@ -54,6 +53,26 @@ def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
    document.tags.add(*inbox_tags)


def _suggestion_printer(
    stdout,
    style_func,
    suggestion_type: str,
    document: Document,
    selected: MatchingModel,
    base_url: Optional[str] = None,
):
    """
    Smaller helper to reduce duplication when just outputting suggestions to the console
    """
    doc_str = str(document)
    if base_url is not None:
        stdout.write(style_func.SUCCESS(doc_str))
        stdout.write(style_func.SUCCESS(f"{base_url}/documents/{document.pk}"))
    else:
        stdout.write(style_func.SUCCESS(f"{doc_str} [{document.pk}]"))
    stdout.write(f"Suggest {suggestion_type}: {selected}")


def set_correspondent(
    sender,
    document: Document,
@@ -63,7 +82,8 @@ def set_correspondent(
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    stdout=None,
    style_func=None,
    **kwargs,
):
    if document.correspondent and not replace:
@@ -90,23 +110,14 @@ def set_correspondent(

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest correspondent {selected}")
            _suggestion_printer(
                stdout,
                style_func,
                "correspondent",
                document,
                selected,
                base_url,
            )
        else:
            logger.info(
                f"Assigning correspondent {selected} to {document}",
@@ -126,7 +137,8 @@ def set_document_type(
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    stdout=None,
    style_func=None,
    **kwargs,
):
    if document.document_type and not replace:
@@ -154,23 +166,14 @@ def set_document_type(

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest document type {selected}")
            _suggestion_printer(
                stdout,
                style_func,
                "document type",
                document,
                selected,
                base_url,
            )
        else:
            logger.info(
                f"Assigning document type {selected} to {document}",
@@ -189,7 +192,8 @@ def set_tags(
    replace=False,
    suggest=False,
    base_url=None,
    color=False,
    stdout=None,
    style_func=None,
    **kwargs,
):
    if replace:
@@ -212,26 +216,16 @@ def set_tags(
        ]
        if not relevant_tags and not extra_tags:
            return
        doc_str = style_func.SUCCESS(str(document))
        if base_url:
            print(
                termcolors.colorize(str(document), fg="green")
                if color
                else str(document),
            )
            print(f"{base_url}/documents/{document.pk}")
            stdout.write(doc_str)
            stdout.write(f"{base_url}/documents/{document.pk}")
        else:
            print(
                (
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document)
                )
                + f" [{document.pk}]",
            )
            stdout.write(doc_str + style_func.SUCCESS(f" [{document.pk}]"))
        if relevant_tags:
            print("Suggest tags: " + ", ".join([t.name for t in relevant_tags]))
            stdout.write("Suggest tags: " + ", ".join([t.name for t in relevant_tags]))
        if extra_tags:
            print("Extra tags: " + ", ".join([t.name for t in extra_tags]))
            stdout.write("Extra tags: " + ", ".join([t.name for t in extra_tags]))
    else:
        if not relevant_tags:
            return
@@ -254,7 +248,8 @@ def set_storage_path(
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    stdout=None,
    style_func=None,
    **kwargs,
):
    if document.storage_path and not replace:
@@ -285,23 +280,14 @@ def set_storage_path(

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest storage directory {selected}")
            _suggestion_printer(
                stdout,
                style_func,
                "storage directory",
                document,
                selected,
                base_url,
            )
        else:
            logger.info(
                f"Assigning storage path {selected} to {document}",

@@ -246,8 +246,6 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):

        self.doc3.title = "Title 2 - Doc 3"
        self.doc3.save()
        print(self.doc3.archive_path)
        print(self.doc3.archive_filename)

        response = self.client.post(
            self.ENDPOINT,

@@ -14,6 +14,7 @@ from documents.barcodes import BarcodePlugin
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import DocumentConsumeDelayMixin
from documents.tests.utils import DummyProgressManager
@@ -741,3 +742,125 @@ class TestBarcodeZxing(TestBarcode):
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
class TestAsnBarcodesZxing(TestAsnBarcode):
    pass


class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, TestCase):
    @contextmanager
    def get_reader(self, filepath: Path) -> BarcodePlugin:
        reader = BarcodePlugin(
            ConsumableDocument(DocumentSource.ConsumeFolder, original_file=filepath),
            DocumentMetadataOverrides(),
            DummyProgressManager(filepath.name, None),
            self.dirs.scratch_dir,
            "task-id",
        )
        reader.setup()
        yield reader
        reader.cleanup()

    @override_settings(CONSUMER_ENABLE_TAG_BARCODE=True)
    def test_scan_file_without_matching_barcodes(self):
        """
        GIVEN:
            - PDF containing tag barcodes but none with matching prefix (default "TAG:")
        WHEN:
            - File is scanned for barcodes
        THEN:
            - No TAG has been created
        """
        test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-custom-prefix.pdf"
        with self.get_reader(test_file) as reader:
            reader.run()
            tags = reader.metadata.tag_ids
            self.assertEqual(tags, None)

    @override_settings(
        CONSUMER_ENABLE_TAG_BARCODE=False,
        CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
    )
    def test_scan_file_with_matching_barcode_but_function_disabled(self):
        """
        GIVEN:
            - PDF containing a tag barcode with matching custom prefix
            - The tag barcode functionality is disabled
        WHEN:
            - File is scanned for barcodes
        THEN:
            - No TAG has been created
        """
        test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-custom-prefix.pdf"
        with self.get_reader(test_file) as reader:
            reader.run()
            tags = reader.metadata.tag_ids
            self.assertEqual(tags, None)

    @override_settings(
        CONSUMER_ENABLE_TAG_BARCODE=True,
        CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<1>"},
    )
    def test_scan_file_for_tag_custom_prefix(self):
        """
        GIVEN:
            - PDF containing a tag barcode with custom prefix
            - The barcode mapping accepts this prefix and removes it from the mapped tag value
            - The created tag is the non-prefixed values
        WHEN:
            - File is scanned for barcodes
        THEN:
            - The TAG is located
            - One TAG has been created
        """
        test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-custom-prefix.pdf"
        with self.get_reader(test_file) as reader:
            reader.metadata.tag_ids = [99]
            reader.run()
            self.assertEqual(reader.pdf_file, test_file)
            tags = reader.metadata.tag_ids
            self.assertEqual(len(tags), 2)
            self.assertEqual(tags[0], 99)
            self.assertEqual(Tag.objects.get(name__iexact="00123").pk, tags[1])

    @override_settings(
        CONSUMER_ENABLE_TAG_BARCODE=True,
        CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "\\g<1>"},
    )
    def test_scan_file_for_many_custom_tags(self):
        """
        GIVEN:
            - PDF containing multiple tag barcode with custom prefix
            - The barcode mapping accepts this prefix and removes it from the mapped tag value
            - The created tags are the non-prefixed values
        WHEN:
            - File is scanned for barcodes
        THEN:
            - The TAG is located
            - File Tags have been created
        """
        test_file = self.BARCODE_SAMPLE_DIR / "split-by-asn-1.pdf"
        with self.get_reader(test_file) as reader:
            reader.run()
            tags = reader.metadata.tag_ids
            self.assertEqual(len(tags), 5)
            self.assertEqual(Tag.objects.get(name__iexact="00123").pk, tags[0])
            self.assertEqual(Tag.objects.get(name__iexact="00124").pk, tags[1])
            self.assertEqual(Tag.objects.get(name__iexact="00125").pk, tags[2])
            self.assertEqual(Tag.objects.get(name__iexact="00126").pk, tags[3])
            self.assertEqual(Tag.objects.get(name__iexact="00127").pk, tags[4])

    @override_settings(
        CONSUMER_ENABLE_TAG_BARCODE=True,
        CONSUMER_TAG_BARCODE_MAPPING={"CUSTOM-PREFIX-(.*)": "\\g<3>"},
    )
    def test_scan_file_for_tag_raises_value_error(self):
        """
        GIVEN:
            - Any error occurs during tag barcode processing
        THEN:
            - The processing should be skipped and not break the import
        """
        test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-custom-prefix.pdf"
        with self.get_reader(test_file) as reader:
            reader.run()
            # expect error to be caught and logged only
            tags = reader.metadata.tag_ids
            self.assertEqual(tags, None)

@@ -88,10 +88,10 @@ class ConsumerThreadMixin(DocumentConsumeDelayMixin):
        ):
            eq = filecmp.cmp(input_doc.original_file, self.sample_file, shallow=False)
            if not eq:
                print("Consumed an INVALID file.")
                print("Consumed an INVALID file.")  # noqa: T201
                raise ConsumerError("Incomplete File READ FAILED")
            else:
                print("Consumed a perfectly valid file.")
                print("Consumed a perfectly valid file.")  # noqa: T201

    def slow_write_file(self, target, incomplete=False):
        with open(self.sample_file, "rb") as f:
@@ -102,11 +102,11 @@ class ConsumerThreadMixin(DocumentConsumeDelayMixin):

        with open(target, "wb") as f:
            # this will take 2 seconds, since the file is about 20k.
            print("Start writing file.")
            print("Start writing file.")  # noqa: T201
            for b in chunked(1000, pdf_bytes):
                f.write(b)
                sleep(0.1)
            print("file completed.")
            print("file completed.")  # noqa: T201


@override_settings(

@@ -196,7 +196,7 @@ class TestFuzzyMatchCommand(TestCase):
        self.assertEqual(Document.objects.count(), 3)

        stdout, _ = self.call_command("--delete")
        print(stdout)

        lines = [x.strip() for x in stdout.split("\n") if len(x.strip())]
        self.assertEqual(len(lines), 3)
        self.assertEqual(

@@ -1,16 +1,19 @@
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING
from unittest import mock

from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.db.models import QuerySet
from django.utils import timezone
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_users_with_perms
from rest_framework.test import APITestCase

if TYPE_CHECKING:
    from django.db.models import QuerySet

from documents import tasks
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentSource

@@ -340,7 +340,6 @@ class DummyProgressManager:
    def __init__(self, filename: str, task_id: Optional[str] = None) -> None:
        self.filename = filename
        self.task_id = task_id
        print("hello world")
        self.payloads = []

    def __enter__(self):

@@ -1,3 +1,5 @@
import logging

from django.conf import settings
from django.contrib import auth
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
@@ -6,6 +8,8 @@ from django.http import HttpRequest
from django.utils.deprecation import MiddlewareMixin
from rest_framework import authentication

logger = logging.getLogger("paperless.auth")


class AutoLoginMiddleware(MiddlewareMixin):
    def process_request(self, request: HttpRequest):
@@ -35,7 +39,7 @@ class AngularApiAuthenticationOverride(authentication.BaseAuthentication):
            and request.headers["Referer"].startswith("http://localhost:4200/")
        ):
            user = User.objects.filter(is_staff=True).first()
            print(f"Auto-Login with user {user}")
            logger.debug(f"Auto-Login with user {user}")
            return (user, None)
        else:
            return None

@@ -796,6 +796,11 @@ CACHES = {
    },
}

if DEBUG and os.getenv("PAPERLESS_CACHE_BACKEND") is None:
    CACHES["default"][
        "BACKEND"
    ] = "django.core.cache.backends.locmem.LocMemCache"  # pragma: no cover


def default_threads_per_worker(task_workers) -> int:
    # always leave one core open
@@ -878,6 +883,19 @@ CONSUMER_BARCODE_UPSCALE: Final[float] = __get_float(

CONSUMER_BARCODE_DPI: Final[int] = __get_int("PAPERLESS_CONSUMER_BARCODE_DPI", 300)

CONSUMER_ENABLE_TAG_BARCODE: Final[bool] = __get_boolean(
    "PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE",
)

CONSUMER_TAG_BARCODE_MAPPING = dict(
    json.loads(
        os.getenv(
            "PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING",
            '{"TAG:(.*)": "\\\\g<1>"}',
        ),
    ),
)

CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = __get_boolean(
    "PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED",
)
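
For context, a hedged usage sketch of the CONSUMER_TAG_BARCODE_MAPPING setting above: the value is a JSON object mapping a regex to a substitution pattern, so a custom prefix (like the one exercised in the tests) could be configured roughly as follows. The environment value shown is illustrative, not a shipped default:

import json
import os

# hypothetical override; per the code above the shipped default is {"TAG:(.*)": "\\g<1>"}
os.environ["PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING"] = '{"CUSTOM-PREFIX-(.*)": "\\\\g<1>"}'

mapping = dict(
    json.loads(
        os.getenv(
            "PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING",
            '{"TAG:(.*)": "\\\\g<1>"}',
        ),
    ),
)
print(mapping)  # {'CUSTOM-PREFIX-(.*)': '\\g<1>'}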