Implement document updated workflow trigger
parent 264d9f97fc
commit 220e9993a1
@@ -9,9 +9,11 @@ class DocumentsConfig(AppConfig):

     def ready(self):
         from documents.signals import document_consumption_finished
+        from documents.signals import document_updated
         from documents.signals.handlers import add_inbox_tags
         from documents.signals.handlers import add_to_index
-        from documents.signals.handlers import run_workflows
+        from documents.signals.handlers import run_workflow_added
+        from documents.signals.handlers import run_workflow_updated
         from documents.signals.handlers import set_correspondent
         from documents.signals.handlers import set_document_type
         from documents.signals.handlers import set_log_entry
@@ -25,6 +27,7 @@ class DocumentsConfig(AppConfig):
         document_consumption_finished.connect(set_storage_path)
         document_consumption_finished.connect(set_log_entry)
         document_consumption_finished.connect(add_to_index)
-        document_consumption_finished.connect(run_workflows)
+        document_consumption_finished.connect(run_workflow_added)
+        document_updated.connect(run_workflow_updated)

         AppConfig.ready(self)
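For orientation (this aside is not part of the commit): the wiring above is Django's standard custom-signal pattern, i.e. a module-level Signal instance, receivers connected once in AppConfig.ready(), and a send() call whose keyword arguments are handed to every connected receiver. A minimal, self-contained sketch of that round trip, using stand-in values rather than real paperless-ngx objects:

# Illustrative only: mirrors the document_updated / run_workflow_updated wiring
# from this commit, but a plain string stands in for a Document instance.
from django.dispatch import Signal

document_updated = Signal()  # module-level signal, as in documents/signals


def run_workflow_updated(sender, document=None, logging_group=None, **kwargs):
    # Receivers get `sender` plus whatever keyword arguments send() supplies;
    # unused extras must be absorbed via **kwargs.
    print(f"running DOCUMENT_UPDATED workflows for {document!r}")


# Normally done once in DocumentsConfig.ready():
document_updated.connect(run_workflow_updated)

# Emitted wherever a document is edited (the update view or the bulk-update task):
document_updated.send(sender=None, document="doc-42")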
@@ -3,7 +3,6 @@ import re
 from fnmatch import fnmatch

 from documents.classifier import DocumentClassifier
-from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentSource
 from documents.models import Correspondent
 from documents.models import Document
@@ -239,7 +238,7 @@ def _split_match(matching_model):


 def document_matches_workflow(
-    document: ConsumableDocument | Document,
+    document,  # ConsumableDocument | Document
     workflow: Workflow,
     trigger_type: WorkflowTrigger.WorkflowTriggerType,
 ) -> bool:
@@ -404,7 +404,7 @@ class Migration(migrations.Migration):
                     ),
                 ),
                 (
-                    "filter_has_correspondent",
+                    "filter_has_document_type",
                     models.ForeignKey(
                         blank=True,
                         null=True,
@@ -414,7 +414,7 @@ class Migration(migrations.Migration):
                     ),
                 ),
                 (
-                    "filter_has_document_type",
+                    "filter_has_correspondent",
                     models.ForeignKey(
                         blank=True,
                         null=True,
@@ -949,7 +949,7 @@ class WorkflowTrigger(models.Model):
         verbose_name=_("has these tag(s)"),
     )

-    filter_has_correspondent = models.ForeignKey(
+    filter_has_document_type = models.ForeignKey(
         DocumentType,
         null=True,
         blank=True,
@@ -957,7 +957,7 @@ class WorkflowTrigger(models.Model):
         verbose_name=_("has this document type"),
     )

-    filter_has_document_type = models.ForeignKey(
+    filter_has_correspondent = models.ForeignKey(
         Correspondent,
         null=True,
         blank=True,
@@ -3,3 +3,4 @@ from django.dispatch import Signal
 document_consumption_started = Signal()
 document_consumption_finished = Signal()
 document_consumer_declaration = Signal()
+document_updated = Signal()
@@ -519,14 +519,22 @@ def add_to_index(sender, document, **kwargs):
     index.add_or_update_document(document)


-def run_workflows(sender, document: Document, logging_group=None, **kwargs):
+def run_workflow_added(sender, document: Document, logging_group=None, **kwargs):
+    run_workflow(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED, document)
+
+
+def run_workflow_updated(sender, document: Document, logging_group=None, **kwargs):
+    run_workflow(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)
+
+
+def run_workflow(trigger_type: WorkflowTrigger.WorkflowTriggerType, document: Document):
     for workflow in Workflow.objects.filter(
-        triggers__type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
+        triggers__type=trigger_type,
     ).order_by("order"):
         if matching.document_matches_workflow(
             document,
             workflow,
-            WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
+            trigger_type,
         ):
             for action in workflow.actions.all():
                 if action.assign_tags.all().count() > 0:
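Another aside, not part of the diff: the handler refactor above is the familiar "thin receivers, one shared dispatcher keyed by trigger type" shape. A self-contained sketch of that shape with hypothetical, simplified stand-ins for the workflow models (the real code queries Workflow/WorkflowTrigger rows from the database and applies WorkflowAction objects):

# Hypothetical stand-ins only; names do not correspond to real paperless-ngx models.
from dataclasses import dataclass
from enum import Enum, auto


class TriggerType(Enum):
    DOCUMENT_ADDED = auto()
    DOCUMENT_UPDATED = auto()


@dataclass
class FakeWorkflow:
    name: str
    trigger_types: set  # which TriggerType values this workflow listens to
    order: int = 0

    def matches(self, document) -> bool:
        return True  # the real check applies filename/tag/type/correspondent filters


WORKFLOWS = [
    FakeWorkflow("assign-inbox-tag", {TriggerType.DOCUMENT_ADDED}),
    FakeWorkflow("set-custom-field", {TriggerType.DOCUMENT_UPDATED}),
]


def run_workflow(trigger_type: TriggerType, document) -> None:
    # Shared dispatcher: pick workflows registered for this trigger type,
    # in order, and apply the ones whose filters match the document.
    for workflow in sorted(WORKFLOWS, key=lambda w: w.order):
        if trigger_type in workflow.trigger_types and workflow.matches(document):
            print(f"applying {workflow.name} to {document!r}")


# Thin signal receivers: each one only fixes the trigger type and delegates.
def run_workflow_added(sender, document, **kwargs):
    run_workflow(TriggerType.DOCUMENT_ADDED, document)


def run_workflow_updated(sender, document, **kwargs):
    run_workflow(TriggerType.DOCUMENT_UPDATED, document)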
@@ -36,6 +36,7 @@ from documents.models import Tag
 from documents.parsers import DocumentParser
 from documents.parsers import get_parser_class_for_mime_type
 from documents.sanity_checker import SanityCheckFailedException
+from documents.signals import document_updated

 if settings.AUDIT_LOG_ENABLED:
     import json
@@ -215,6 +216,10 @@ def bulk_update_documents(document_ids):
     ix = index.open_index()

     for doc in documents:
+        document_updated.send(
+            sender=None,
+            document=doc,
+        )
         post_save.send(Document, instance=doc, created=False)

     with AsyncWriter(ix) as writer:
@@ -1,12 +1,12 @@
 from datetime import timedelta
 from pathlib import Path
-from unittest import TestCase
 from unittest import mock

 import pytest
 from django.contrib.auth.models import Group
 from django.contrib.auth.models import User
 from django.utils import timezone
+from rest_framework.test import APITestCase

 from documents import tasks
 from documents.data_models import ConsumableDocument
@@ -28,7 +28,7 @@ from paperless_mail.models import MailRule


 @pytest.mark.django_db
-class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
+class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
     SAMPLE_DIR = Path(__file__).parent / "samples"

     def setUp(self) -> None:
@@ -626,7 +626,6 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_added_workflow(self):
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
-            sources=f"{DocumentSource.ApiUpload},{DocumentSource.ConsumeFolder},{DocumentSource.MailFetch}",
             filter_filename="*sample*",
         )
         action = WorkflowAction.objects.create(
@@ -671,3 +670,35 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, TestCase):

         self.assertEqual(doc.correspondent, self.c2)
         self.assertEqual(doc.title, f"Doc created in {created.year}")
+
+    def test_document_updated_workflow(self):
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
+            filter_has_document_type=self.dt,
+        )
+        action = WorkflowAction.objects.create()
+        action.assign_custom_fields.add(self.cf1)
+        w = Workflow.objects.create(
+            name="Workflow 1",
+            order=0,
+        )
+        w.triggers.add(trigger)
+        w.actions.add(action)
+        w.save()
+
+        doc = Document.objects.create(
+            title="sample test",
+            correspondent=self.c,
+            original_filename="sample.pdf",
+        )
+
+        superuser = User.objects.create_superuser("superuser")
+        self.client.force_authenticate(user=superuser)
+
+        self.client.patch(
+            f"/api/documents/{doc.id}/",
+            {"document_type": self.dt.id},
+            format="json",
+        )
+
+        self.assertEqual(doc.custom_fields.all().count(), 1)
@@ -116,6 +116,7 @@ from documents.serialisers import UiSettingsViewSerializer
 from documents.serialisers import WorkflowActionSerializer
 from documents.serialisers import WorkflowSerializer
 from documents.serialisers import WorkflowTriggerSerializer
+from documents.signals import document_updated
 from documents.tasks import consume_file
 from paperless import version
 from paperless.db import GnuPG
@@ -324,6 +325,12 @@ class DocumentViewSet(
         from documents import index

         index.add_or_update_document(self.get_object())
+
+        document_updated.send(
+            sender=self.__class__,
+            document=self.get_object(),
+        )
+
         return response

     def destroy(self, request, *args, **kwargs):
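End to end, the practical effect of the change: editing a document over the REST API (as the new test does) now emits document_updated, so workflows with a DOCUMENT_UPDATED trigger are evaluated against the edited document. A hedged client-side illustration, assuming a running paperless-ngx instance; the host, token, and IDs below are placeholders:

# Placeholder values throughout; adjust to your own instance and objects.
import requests

BASE_URL = "http://localhost:8000"
HEADERS = {"Authorization": "Token <your-api-token>"}

# PATCHing a document (here: assigning document type 3 to document 42) goes
# through DocumentViewSet.update(), which now sends the document_updated
# signal after re-indexing, so matching DOCUMENT_UPDATED workflows run.
response = requests.patch(
    f"{BASE_URL}/api/documents/42/",
    json={"document_type": 3},
    headers=HEADERS,
    timeout=30,
)
response.raise_for_status()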