Changing the formatting to ruff-format

This commit is contained in:
parent 3facdefa40
commit eeabc82358
@@ -50,10 +50,7 @@ repos:
rev: 'v0.4.4'
hooks:
- id: ruff
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2
hooks:
- id: black
- id: ruff-format
# Dockerfile hooks
- repo: https://github.com/AleksaC/hadolint-py
rev: v2.12.0.3
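The pre-commit hunk above is the heart of the change: the psf/black mirror hook is removed and the existing ruff hook gains a second id, ruff-format, which takes over formatting. The same two steps can be reproduced locally without pre-commit; a minimal sketch, assuming ruff is installed in the environment and with "src" standing in for the real source directory:

# run_format.py - minimal sketch of the lint + format steps the hooks perform
import subprocess

# Lint and apply safe autofixes (the "ruff" hook).
subprocess.run(["ruff", "check", "--fix", "src"], check=True)

# Reformat in place (the "ruff-format" hook, replacing black).
subprocess.run(["ruff", "format", "src"], check=True)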
@@ -4,6 +4,7 @@ Simple script which attempts to ping the Redis broker as set in the environment
a certain number of times, waiting a little bit in between
"""
import os
import sys
import time
@@ -107,7 +107,6 @@ class BarcodePlugin(ConsumeTaskPlugin):
if settings.CONSUMER_ENABLE_BARCODES and (
separator_pages := self.get_separation_pages()
):
# We have pages to split against
# Note this does NOT use the base_temp_dir, as that will be removed
@@ -25,7 +25,6 @@ logger = logging.getLogger("paperless.bulk_edit")
def set_correspondent(doc_ids: list[int], correspondent):
if correspondent:
correspondent = Correspondent.objects.only("pk").get(id=correspondent)

@@ -81,7 +80,6 @@ def set_document_type(doc_ids: list[int], document_type):
def add_tag(doc_ids: list[int], tag: int):
qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True))

@@ -97,7 +95,6 @@ def add_tag(doc_ids: list[int], tag: int):
def remove_tag(doc_ids: list[int], tag: int):
qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True))
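The bulk_edit hunks above, and the migration hunks further down, are all the same one-line change: the blank line that used to open a function or class body is dropped in the new formatting. A minimal before/after sketch with an illustrative function:

# Old layout: a blank line followed the signature.
def add_tag_old(doc_ids: list[int], tag: int):

    return doc_ids, tag


# New layout: the body starts directly under the signature.
def add_tag_new(doc_ids: list[int], tag: int):
    return doc_ids, tag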
@@ -151,14 +151,17 @@ class Command(BaseCommand):
self._check_manifest_valid()
with disable_signal(
post_save,
receiver=update_filename_and_move_files,
sender=Document,
), disable_signal(
m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
with (
disable_signal(
post_save,
receiver=update_filename_and_move_files,
sender=Document,
),
disable_signal(
m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
),
):
if settings.AUDIT_LOG_ENABLED:
auditlog.unregister(Document)
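This hunk, and the consume_file, file-handling and Gotenberg hunks further down, show the most visible ruff-format change: several context managers that previously hung off a single with line are rewritten in the parenthesized form. The two spellings are equivalent; a self-contained sketch (the parenthesized form is officially part of the grammar from Python 3.10, and CPython 3.9's parser already accepts it; names here are illustrative):

from contextlib import contextmanager

@contextmanager
def resource(name):
    # Illustrative stand-in for disable_signal() / ProgressManager() / GotenbergClient()
    print("enter", name)
    try:
        yield name
    finally:
        print("exit", name)

# Old layout: both managers chained on the `with` line.
with resource("signals") as a, resource("scratch") as b:
    print(a, b)

# ruff-format layout: parenthesized, one context manager per line, trailing comma.
with (
    resource("signals") as a,
    resource("scratch") as b,
):
    print(a, b)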
@@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1044_workflow_workflowaction_workflowtrigger_and_more"),
]

@@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),

@@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1046_workflowaction_remove_all_correspondents_and_more"),
]

@@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1047_savedview_display_mode_and_more"),
]
@@ -93,7 +93,9 @@ class MatchingModelSerializer(serializers.ModelSerializer):
owner = (
data["owner"]
if "owner" in data
else self.user if hasattr(self, "user") else None
else self.user
if hasattr(self, "user")
else None
)
pk = self.instance.pk if hasattr(self.instance, "pk") else None
if ("name" in data or "owner" in data) and self.Meta.model.objects.filter(
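The serializer hunk above changes how a nested conditional expression is wrapped: the one-line `else x if cond else y` tail is split so that every clause gets its own line. A stand-alone sketch with illustrative names:

data = {"owner": "alice"}
fallback_user = None

# Old wrapping: the nested conditional rides along on the final else line.
owner_old = (
    data["owner"]
    if "owner" in data
    else fallback_user if fallback_user is not None else None
)

# New wrapping: each clause of the nested conditional on its own line.
owner_new = (
    data["owner"]
    if "owner" in data
    else fallback_user
    if fallback_user is not None
    else None
)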
@@ -117,10 +117,13 @@ def consume_file(
ConsumerPlugin,
]
with ProgressManager(
overrides.filename or input_doc.original_file.name,
self.request.id,
) as status_mgr, TemporaryDirectory(dir=settings.SCRATCH_DIR) as tmp_dir:
with (
ProgressManager(
overrides.filename or input_doc.original_file.name,
self.request.id,
) as status_mgr,
TemporaryDirectory(dir=settings.SCRATCH_DIR) as tmp_dir,
):
tmp_dir = Path(tmp_dir)
for plugin_class in plugins:
plugin_name = plugin_class.NAME
@@ -136,7 +136,6 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
def test_api_add_tag(self, bulk_update_task_mock):
self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
response = self.client.post(

@@ -425,7 +425,6 @@ class TestConsumer(
self._assert_first_last_send_progress()
def testOverrideTitle(self):
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(title="Override Title"),

@@ -441,7 +440,6 @@ class TestConsumer(
def testOverrideTitleInvalidPlaceholders(self):
with self.assertLogs("paperless.consumer", level="ERROR") as cm:
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(title="Override {correspondent]"),

@@ -546,7 +544,6 @@ class TestConsumer(
self._assert_first_last_send_progress()
def testOverrideAsn(self):
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(asn=123),

@@ -614,7 +611,6 @@ class TestConsumer(
self._assert_first_last_send_progress()
def testNotAFile(self):
with self.get_consumer(Path("non-existing-file")) as consumer:
with self.assertRaisesMessage(ConsumerError, "File not found"):
consumer.run()

@@ -725,7 +721,6 @@ class TestConsumer(
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
def testFilenameHandling(self):
with self.get_consumer(
self.get_test_file(),
DocumentMetadataOverrides(title="new docs"),

@@ -1055,7 +1050,6 @@ class PreConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
@override_settings(PRE_CONSUME_SCRIPT="does-not-exist")
def test_pre_consume_script_not_found(self, m):
with self.get_consumer(self.test_file) as c:
self.assertRaises(ConsumerError, c.run)
m.assert_not_called()

@@ -1254,7 +1248,6 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
os.chmod(script.name, st.st_mode | stat.S_IEXEC)
with override_settings(POST_CONSUME_SCRIPT=script.name):
doc = Document.objects.create(title="Test", mime_type="application/pdf")
with self.get_consumer(self.test_file) as consumer:
with self.assertRaisesRegex(

@@ -10,7 +10,6 @@ from documents.parsers import parse_date_generator
class TestDate(TestCase):
def test_date_format_1(self):
text = "lorem ipsum 130218 lorem ipsum"
self.assertEqual(parse_date("", text), None)
@@ -144,9 +144,12 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
# Set a correspondent and save the document
document.correspondent = Correspondent.objects.get_or_create(name="test")[0]
with mock.patch(
"documents.signals.handlers.Document.objects.filter",
) as m, disable_auditlog():
with (
mock.patch(
"documents.signals.handlers.Document.objects.filter",
) as m,
disable_auditlog(),
):
m.side_effect = DatabaseError()
document.save()
@@ -618,7 +618,6 @@ class DocumentViewSet(
@action(methods=["get", "post", "delete"], detail=True)
def notes(self, request, pk=None):
currentUser = request.user
try:
doc = (

@@ -1337,7 +1336,6 @@ class StatisticsView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request, format=None):
user = request.user if request.user is not None else None
documents = (

@@ -1533,9 +1531,9 @@ class UiSettingsView(GenericAPIView):
if hasattr(user, "ui_settings"):
ui_settings = user.ui_settings.settings
if "update_checking" in ui_settings:
ui_settings["update_checking"][
"backend_setting"
] = settings.ENABLE_UPDATE_CHECK
ui_settings["update_checking"]["backend_setting"] = (
settings.ENABLE_UPDATE_CHECK
)
else:
ui_settings["update_checking"] = {
"backend_setting": settings.ENABLE_UPDATE_CHECK,
@@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("paperless", "0002_applicationconfiguration_app_logo_and_more"),
]
@@ -171,8 +171,7 @@ def _parse_beat_schedule() -> dict:
"task": "paperless_mail.tasks.process_mail_accounts",
"options": {
# 1 minute before default schedule sends again
"expires": 9.0
* 60.0,
"expires": 9.0 * 60.0,
},
},
{

@@ -183,8 +182,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.train_classifier",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0
* 60.0,
"expires": 59.0 * 60.0,
},
},
{

@@ -195,9 +193,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.index_optimize",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0
* 60.0
* 60.0,
"expires": 23.0 * 60.0 * 60.0,
},
},
{

@@ -208,9 +204,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.sanity_check",
"options": {
# 1 hour before default schedule sends again
"expires": ((7.0 * 24.0) - 1.0)
* 60.0
* 60.0,
"expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
},
},
]
@@ -822,9 +816,9 @@ CACHES = {
}
if DEBUG and os.getenv("PAPERLESS_CACHE_BACKEND") is None:
CACHES["default"][
"BACKEND"
] = "django.core.cache.backends.locmem.LocMemCache" # pragma: no cover
CACHES["default"]["BACKEND"] = (
"django.core.cache.backends.locmem.LocMemCache" # pragma: no cover
)
def default_threads_per_worker(task_workers) -> int:
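This hunk and the UiSettingsView hunk earlier apply the same rewrite to long assignments: the subscripted target is no longer split across lines, the right-hand side is wrapped in parentheses instead. A stand-alone sketch with illustrative names:

caches = {"default": {}}
locmem = "django.core.cache.backends.locmem.LocMemCache"

# Old wrapping: the subscripted target is broken up to fit the line length.
caches["default"][
    "BACKEND"
] = locmem

# New wrapping: the target stays on one line and the value is parenthesized.
caches["default"]["BACKEND"] = (
    locmem
)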
@@ -29,7 +29,6 @@ class TestCustomAccountAdapter(TestCase):
with context.request_context(request):
adapter = get_adapter()
with override_settings(ALLOWED_HOSTS=["*"]):
# True because request host is same
url = "https://example.com"
self.assertTrue(adapter.is_safe_url(url))
@@ -63,9 +63,12 @@ class MailRule(document_models.ModelWithOwner):
class ConsumptionScope(models.IntegerChoices):
ATTACHMENTS_ONLY = 1, _("Only process attachments.")
EML_ONLY = 2, _("Process full Mail (with embedded attachments in file) as .eml")
EVERYTHING = 3, _(
"Process full Mail (with embedded attachments in file) as .eml "
"+ process attachments as separate documents",
EVERYTHING = (
3,
_(
"Process full Mail (with embedded attachments in file) as .eml "
"+ process attachments as separate documents",
),
)
class AttachmentProcessing(models.IntegerChoices):
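In the MailRule hunk the long IntegerChoices member is rewritten as an explicit tuple rather than a bare value-label pair whose label call carried the line break. A minimal sketch outside Django, with a stand-in for gettext and illustrative names:

from enum import Enum

def _(text):
    # Stand-in for django.utils.translation.gettext_lazy
    return text

class ScopeOld(Enum):
    # Old layout: bare tuple, the wrapped call to _() holds the line break.
    EVERYTHING = 3, _(
        "Process full Mail as .eml + process attachments as separate documents",
    )

class ScopeNew(Enum):
    # New layout: the whole value becomes an explicitly parenthesized tuple.
    EVERYTHING = (
        3,
        _(
            "Process full Mail as .eml + process attachments as separate documents",
        ),
    )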
@@ -222,10 +222,13 @@ class MailDocumentParser(DocumentParser):
self.log.debug("Merging email text and HTML content into single PDF")
with GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client, client.merge.merge() as route:
with (
GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.merge.merge() as route,
):
# Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None:

@@ -310,10 +313,13 @@ class MailDocumentParser(DocumentParser):
css_file = Path(__file__).parent / "templates" / "output.css"
email_html_file = self.mail_to_html(mail)
with GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client, client.chromium.html_to_pdf() as route:
with (
GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.chromium.html_to_pdf() as route,
):
# Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None:

@@ -363,10 +369,13 @@ class MailDocumentParser(DocumentParser):
html_clean_file = tempdir / "index.html"
html_clean_file.write_text(html_clean)
with GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client, client.chromium.html_to_pdf() as route:
with (
GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.chromium.html_to_pdf() as route,
):
# Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None:

@@ -88,10 +88,13 @@ class TikaDocumentParser(DocumentParser):
self.log.info(f"Converting {document_path} to PDF as {pdf_path}")
with GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client, client.libre_office.to_pdf() as route:
with (
GotenbergClient(
host=settings.TIKA_GOTENBERG_ENDPOINT,
timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.libre_office.to_pdf() as route,
):
# Set the output format of the resulting PDF
if settings.OCR_OUTPUT_TYPE in {
OutputTypeChoices.PDF_A,