fix:create-document
This commit is contained in:
@@ -13,6 +13,7 @@ from documents.models import SavedView
|
||||
from documents.models import SavedViewFilterRule
|
||||
from documents.models import ShareLink
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Warehouse
|
||||
from documents.models import Tag
|
||||
|
||||
if settings.AUDIT_LOG_ENABLED:
|
||||
@@ -38,6 +39,10 @@ class DocumentTypeAdmin(GuardedModelAdmin):
|
||||
list_filter = ("matching_algorithm",)
|
||||
list_editable = ("match", "matching_algorithm")
|
||||
|
||||
class WarehouseAdmin(GuardedModelAdmin):
    """Admin configuration for Warehouse objects.

    Mirrors the other matching-model admins (e.g. DocumentTypeAdmin):
    the matching rule columns are editable directly from the changelist.
    """

    list_display = (
        "name",
        "type",
        "path",
        "parent_warehouse",
        "match",
        "matching_algorithm",
    )
    list_filter = ("matching_algorithm",)
    list_editable = ("match", "matching_algorithm")
|
||||
class DocumentAdmin(GuardedModelAdmin):
|
||||
search_fields = ("correspondent__name", "title", "content", "tags__name")
|
||||
@@ -188,6 +193,7 @@ class CustomFieldInstancesAdmin(GuardedModelAdmin):
|
||||
admin.site.register(Correspondent, CorrespondentAdmin)
|
||||
admin.site.register(Tag, TagAdmin)
|
||||
admin.site.register(DocumentType, DocumentTypeAdmin)
|
||||
admin.site.register(Warehouse, WarehouseAdmin)
|
||||
admin.site.register(Document, DocumentAdmin)
|
||||
admin.site.register(SavedView, SavedViewAdmin)
|
||||
admin.site.register(StoragePath, StoragePathAdmin)
|
||||
|
||||
@@ -15,6 +15,7 @@ class DocumentsConfig(AppConfig):
|
||||
from documents.signals.handlers import run_workflow_added
|
||||
from documents.signals.handlers import run_workflow_updated
|
||||
from documents.signals.handlers import set_correspondent
|
||||
from documents.signals.handlers import set_warehouse
|
||||
from documents.signals.handlers import set_document_type
|
||||
from documents.signals.handlers import set_log_entry
|
||||
from documents.signals.handlers import set_storage_path
|
||||
@@ -22,6 +23,7 @@ class DocumentsConfig(AppConfig):
|
||||
|
||||
document_consumption_finished.connect(add_inbox_tags)
|
||||
document_consumption_finished.connect(set_correspondent)
|
||||
document_consumption_finished.connect(set_warehouse)
|
||||
document_consumption_finished.connect(set_document_type)
|
||||
document_consumption_finished.connect(set_tags)
|
||||
document_consumption_finished.connect(set_storage_path)
|
||||
|
||||
@@ -15,6 +15,7 @@ from documents.models import Correspondent
|
||||
from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Warehouse
|
||||
from documents.permissions import set_permissions_for_object
|
||||
from documents.tasks import bulk_update_documents
|
||||
from documents.tasks import consume_file
|
||||
@@ -52,6 +53,22 @@ def set_storage_path(doc_ids, storage_path):
|
||||
|
||||
return "OK"
|
||||
|
||||
def set_warehouse(doc_ids, warehouse):
    """Bulk-assign a warehouse to the given documents.

    Args:
        doc_ids: iterable of document primary keys to update.
        warehouse: warehouse primary key to assign, or a falsy value to
            clear the field.

    Documents that already have the requested warehouse are skipped; the
    documents actually changed are re-indexed asynchronously.

    Returns:
        The literal string "OK" (bulk-edit method convention).
    """
    warehouse_obj = Warehouse.objects.get(id=warehouse) if warehouse else warehouse

    # Only touch documents whose warehouse actually differs.
    to_change = Document.objects.filter(
        Q(id__in=doc_ids) & ~Q(warehouse=warehouse_obj),
    )
    changed_ids = [document.id for document in to_change]
    to_change.update(warehouse=warehouse_obj)

    # Refresh the search index for the documents we just modified.
    bulk_update_documents.delay(
        document_ids=changed_ids,
    )

    return "OK"
|
||||
def set_document_type(doc_ids, document_type):
|
||||
if document_type:
|
||||
|
||||
@@ -86,6 +86,7 @@ class DocumentClassifier:
|
||||
self.tags_binarizer = None
|
||||
self.tags_classifier = None
|
||||
self.correspondent_classifier = None
|
||||
self.warehouse_classifier = None
|
||||
self.document_type_classifier = None
|
||||
self.storage_path_classifier = None
|
||||
|
||||
@@ -112,6 +113,7 @@ class DocumentClassifier:
|
||||
|
||||
self.tags_classifier = pickle.load(f)
|
||||
self.correspondent_classifier = pickle.load(f)
|
||||
self.warehouse_classifier = pickle.load(f)
|
||||
self.document_type_classifier = pickle.load(f)
|
||||
self.storage_path_classifier = pickle.load(f)
|
||||
except Exception as err:
|
||||
@@ -148,6 +150,7 @@ class DocumentClassifier:
|
||||
pickle.dump(self.tags_classifier, f)
|
||||
|
||||
pickle.dump(self.correspondent_classifier, f)
|
||||
pickle.dump(self.warehouse_classifier, f)
|
||||
pickle.dump(self.document_type_classifier, f)
|
||||
pickle.dump(self.storage_path_classifier, f)
|
||||
|
||||
@@ -165,6 +168,7 @@ class DocumentClassifier:
|
||||
|
||||
labels_tags = []
|
||||
labels_correspondent = []
|
||||
labels_warehouse = []
|
||||
labels_document_type = []
|
||||
labels_storage_path = []
|
||||
|
||||
@@ -185,6 +189,13 @@ class DocumentClassifier:
|
||||
y = cor.pk
|
||||
hasher.update(y.to_bytes(4, "little", signed=True))
|
||||
labels_correspondent.append(y)
|
||||
|
||||
y = -1
|
||||
wh = doc.warehouse
|
||||
if wh and wh.matching_algorithm == MatchingModel.MATCH_AUTO:
|
||||
y = wh.pk
|
||||
hasher.update(y.to_bytes(4, "little", signed=True))
|
||||
labels_warehouse.append(y)
|
||||
|
||||
tags = sorted(
|
||||
tag.pk
|
||||
@@ -234,10 +245,11 @@ class DocumentClassifier:
|
||||
# it usually is.
|
||||
num_correspondents = len(set(labels_correspondent) | {-1}) - 1
|
||||
num_document_types = len(set(labels_document_type) | {-1}) - 1
|
||||
num_warehouses = len(set(labels_warehouse) | {-1}) - 1
|
||||
num_storage_paths = len(set(labels_storage_path) | {-1}) - 1
|
||||
|
||||
logger.debug(
|
||||
f"{docs_queryset.count()} documents, {num_tags} tag(s), {num_correspondents} correspondent(s), "
|
||||
f"{docs_queryset.count()} documents, {num_tags} tag(s), {num_correspondents} correspondent(s), {num_warehouses} warehouse(s) "
|
||||
f"{num_document_types} document type(s). {num_storage_paths} storage path(es)",
|
||||
)
|
||||
|
||||
@@ -304,6 +316,17 @@ class DocumentClassifier:
|
||||
"classifier.",
|
||||
)
|
||||
|
||||
if num_warehouses > 0:
|
||||
logger.debug("Training warehouse classifier...")
|
||||
self.warehouse_classifier = MLPClassifier(tol=0.01)
|
||||
self.warehouse_classifier.fit(data_vectorized, labels_warehouse)
|
||||
else:
|
||||
self.warehouse_classifier = None
|
||||
logger.debug(
|
||||
"There are no warehouses. Not training warehouse "
|
||||
"classifier.",
|
||||
)
|
||||
|
||||
if num_document_types > 0:
|
||||
logger.debug("Training document type classifier...")
|
||||
self.document_type_classifier = MLPClassifier(tol=0.01)
|
||||
@@ -414,6 +437,17 @@ class DocumentClassifier:
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
|
||||
def predict_warehouse(self, content: str) -> Optional[int]:
    """Predict a warehouse id for *content*.

    Returns None when no warehouse classifier has been trained, or when
    the classifier predicts the "no warehouse" label (-1). Otherwise the
    predicted warehouse primary key is returned, matching the behavior
    of the sibling predict_* methods.
    """
    if not self.warehouse_classifier:
        return None
    vectorized = self.data_vectorizer.transform([self.preprocess_content(content)])
    predicted = self.warehouse_classifier.predict(vectorized)
    # -1 is the sentinel used during training for "document has no warehouse".
    return predicted if predicted != -1 else None
|
||||
def predict_document_type(self, content: str) -> Optional[int]:
|
||||
if self.document_type_classifier:
|
||||
|
||||
@@ -32,6 +32,7 @@ from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import FileInfo
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Warehouse
|
||||
from documents.models import Tag
|
||||
from documents.models import Workflow
|
||||
from documents.models import WorkflowAction
|
||||
@@ -76,6 +77,7 @@ class WorkflowTriggerPlugin(
|
||||
.prefetch_related("actions__assign_custom_fields")
|
||||
.prefetch_related("actions__remove_tags")
|
||||
.prefetch_related("actions__remove_correspondents")
|
||||
.prefetch_related("actions__remove_warehouses")
|
||||
.prefetch_related("actions__remove_document_types")
|
||||
.prefetch_related("actions__remove_storage_paths")
|
||||
.prefetch_related("actions__remove_custom_fields")
|
||||
@@ -110,6 +112,10 @@ class WorkflowTriggerPlugin(
|
||||
action_overrides.document_type_id = (
|
||||
action.assign_document_type.pk
|
||||
)
|
||||
if action.assign_warehouse is not None:
|
||||
action_overrides.warehouse_id = (
|
||||
action.assign_warehouse.pk
|
||||
)
|
||||
if action.assign_storage_path is not None:
|
||||
action_overrides.storage_path_id = (
|
||||
action.assign_storage_path.pk
|
||||
@@ -298,6 +304,7 @@ class Consumer(LoggingMixin):
|
||||
self.filename = None
|
||||
self.override_title = None
|
||||
self.override_correspondent_id = None
|
||||
self.override_warehouse_id = None
|
||||
self.override_tag_ids = None
|
||||
self.override_document_type_id = None
|
||||
self.override_asn = None
|
||||
@@ -494,6 +501,7 @@ class Consumer(LoggingMixin):
|
||||
override_correspondent_id=None,
|
||||
override_document_type_id=None,
|
||||
override_tag_ids=None,
|
||||
override_warehouse_id=None,
|
||||
override_storage_path_id=None,
|
||||
task_id=None,
|
||||
override_created=None,
|
||||
@@ -515,6 +523,7 @@ class Consumer(LoggingMixin):
|
||||
self.override_correspondent_id = override_correspondent_id
|
||||
self.override_document_type_id = override_document_type_id
|
||||
self.override_tag_ids = override_tag_ids
|
||||
self.override_warehouse_id = override_warehouse_id
|
||||
self.override_storage_path_id = override_storage_path_id
|
||||
self.task_id = task_id or str(uuid.uuid4())
|
||||
self.override_created = override_created
|
||||
@@ -873,6 +882,11 @@ class Consumer(LoggingMixin):
|
||||
document.storage_path = StoragePath.objects.get(
|
||||
pk=self.override_storage_path_id,
|
||||
)
|
||||
|
||||
if self.override_warehouse_id:
|
||||
document.warehouse = Warehouse.objects.get(
|
||||
pk=self.override_warehouse_id,
|
||||
)
|
||||
|
||||
if self.override_asn:
|
||||
document.archive_serial_number = self.override_asn
|
||||
|
||||
@@ -16,13 +16,14 @@ class DocumentMetadataOverrides:
|
||||
be set from content or matching. All fields default to None,
|
||||
meaning no override is happening
|
||||
"""
|
||||
|
||||
|
||||
filename: Optional[str] = None
|
||||
title: Optional[str] = None
|
||||
correspondent_id: Optional[int] = None
|
||||
document_type_id: Optional[int] = None
|
||||
tag_ids: Optional[list[int]] = None
|
||||
storage_path_id: Optional[int] = None
|
||||
warehouse_id: Optional[int] = None
|
||||
created: Optional[datetime.datetime] = None
|
||||
asn: Optional[int] = None
|
||||
owner_id: Optional[int] = None
|
||||
@@ -48,6 +49,8 @@ class DocumentMetadataOverrides:
|
||||
self.document_type_id = other.document_type_id
|
||||
if other.storage_path_id is not None:
|
||||
self.storage_path_id = other.storage_path_id
|
||||
if other.warehouse_id is not None:
|
||||
self.warehouse_id = other.warehouse_id
|
||||
if other.owner_id is not None:
|
||||
self.owner_id = other.owner_id
|
||||
|
||||
@@ -100,6 +103,7 @@ class DocumentMetadataOverrides:
|
||||
overrides.correspondent_id = doc.correspondent.id if doc.correspondent else None
|
||||
overrides.document_type_id = doc.document_type.id if doc.document_type else None
|
||||
overrides.storage_path_id = doc.storage_path.id if doc.storage_path else None
|
||||
overrides.warehouse_id = doc.warehouse.id if doc.warehouse else None
|
||||
overrides.owner_id = doc.owner.id if doc.owner else None
|
||||
overrides.tag_ids = list(doc.tags.values_list("id", flat=True))
|
||||
|
||||
|
||||
@@ -174,6 +174,14 @@ def generate_filename(
|
||||
)
|
||||
else:
|
||||
document_type = no_value_default
|
||||
|
||||
if doc.warehouse:
|
||||
warehouse = pathvalidate.sanitize_filename(
|
||||
doc.warehouse.name,
|
||||
replacement_text="-",
|
||||
)
|
||||
else:
|
||||
warehouse = no_value_default
|
||||
|
||||
if doc.archive_serial_number:
|
||||
asn = str(doc.archive_serial_number)
|
||||
@@ -199,6 +207,7 @@ def generate_filename(
|
||||
title=pathvalidate.sanitize_filename(doc.title, replacement_text="-"),
|
||||
correspondent=correspondent,
|
||||
document_type=document_type,
|
||||
warehouse=warehouse,
|
||||
created=local_created.isoformat(),
|
||||
created_year=local_created.strftime("%Y"),
|
||||
created_year_short=local_created.strftime("%y"),
|
||||
|
||||
@@ -192,7 +192,7 @@ class DocumentFilterSet(FilterSet):
|
||||
|
||||
storage_path__id__none = ObjectFilter(field_name="storage_path", exclude=True)
|
||||
|
||||
warehouses__id__none = ObjectFilter(field_name="warehouses", exclude=True)
|
||||
warehouse__id__none = ObjectFilter(field_name="warehouse", exclude=True)
|
||||
|
||||
is_in_inbox = InboxFilter()
|
||||
|
||||
@@ -227,9 +227,9 @@ class DocumentFilterSet(FilterSet):
|
||||
"storage_path": ["isnull"],
|
||||
"storage_path__id": ID_KWARGS,
|
||||
"storage_path__name": CHAR_KWARGS,
|
||||
"warehouses": ["isnull"],
|
||||
"warehouses__id": ID_KWARGS,
|
||||
"warehouses__name": CHAR_KWARGS,
|
||||
"warehouse": ["isnull"],
|
||||
"warehouse__id": ID_KWARGS,
|
||||
"warehouse__name": CHAR_KWARGS,
|
||||
"owner": ["isnull"],
|
||||
"owner__id": ID_KWARGS,
|
||||
"custom_fields": ["icontains"],
|
||||
|
||||
@@ -60,6 +60,9 @@ def get_schema():
|
||||
type=TEXT(sortable=True),
|
||||
type_id=NUMERIC(),
|
||||
has_type=BOOLEAN(),
|
||||
warehouse=TEXT(sortable=True),
|
||||
warehouse_id=NUMERIC(),
|
||||
has_warehouse=BOOLEAN(),
|
||||
created=DATETIME(sortable=True),
|
||||
modified=DATETIME(sortable=True),
|
||||
added=DATETIME(sortable=True),
|
||||
@@ -155,6 +158,9 @@ def update_document(writer: AsyncWriter, doc: Document):
|
||||
type=doc.document_type.name if doc.document_type else None,
|
||||
type_id=doc.document_type.id if doc.document_type else None,
|
||||
has_type=doc.document_type is not None,
|
||||
warehouse=doc.warehouse.name if doc.warehouse else None,
|
||||
warehouse_id=doc.warehouse.id if doc.warehouse else None,
|
||||
has_warehouse=doc.warehouse is not None,
|
||||
created=doc.created,
|
||||
added=doc.added,
|
||||
asn=asn,
|
||||
@@ -197,6 +203,7 @@ def remove_document_from_index(document: Document):
|
||||
class DelayedQuery:
|
||||
param_map = {
|
||||
"correspondent": ("correspondent", ["id", "id__in", "id__none", "isnull"]),
|
||||
"warehouse": ("warehouse", ["id", "id__in", "id__none", "isnull"]),
|
||||
"document_type": ("type", ["id", "id__in", "id__none", "isnull"]),
|
||||
"storage_path": ("path", ["id", "id__in", "id__none", "isnull"]),
|
||||
"owner": ("owner", ["id", "id__in", "id__none", "isnull"]),
|
||||
|
||||
@@ -7,6 +7,7 @@ from documents.classifier import DocumentClassifier
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentSource
|
||||
from documents.models import Correspondent
|
||||
from documents.models import Warehouse
|
||||
from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import MatchingModel
|
||||
@@ -56,6 +57,28 @@ def match_correspondents(document: Document, classifier: DocumentClassifier, use
|
||||
),
|
||||
)
|
||||
|
||||
def match_warehouses(document: Document, classifier: DocumentClassifier, user=None):
    """Return the warehouses that match *document*.

    A warehouse matches when its matching rule applies to the document,
    or when it is the classifier's prediction and uses MATCH_AUTO.
    Candidates are restricted to those visible to *user* (falling back
    to the document owner); with no user at all, every warehouse is
    considered.
    """
    predicted_pk = None
    if classifier:
        predicted_pk = classifier.predict_warehouse(document.content)

    effective_user = user if user is not None else document.owner

    if effective_user is not None:
        candidates = get_objects_for_user_owner_aware(
            effective_user,
            "documents.view_warehouse",
            Warehouse,
        )
    else:
        candidates = Warehouse.objects.all()

    return [
        warehouse
        for warehouse in candidates
        if matches(warehouse, document)
        or (
            warehouse.pk == predicted_pk
            and warehouse.matching_algorithm == MatchingModel.MATCH_AUTO
        )
    ]
||||
def match_document_types(document: Document, classifier: DocumentClassifier, user=None):
|
||||
pred_id = classifier.predict_document_type(document.content) if classifier else None
|
||||
@@ -356,6 +379,16 @@ def existing_document_matches_workflow(
|
||||
f"Document correspondent {document.correspondent} does not match {trigger.filter_has_correspondent}",
|
||||
)
|
||||
trigger_matched = False
|
||||
|
||||
# Document warehouse vs trigger has_warehouse
|
||||
if (
|
||||
trigger.filter_has_warehouse is not None
|
||||
and document.warehouse != trigger.filter_has_warehouse
|
||||
):
|
||||
reason = (
|
||||
f"Document warehouse {document.warehouse} does not match {trigger.filter_has_warehouse}",
|
||||
)
|
||||
trigger_matched = False
|
||||
|
||||
# Document document_type vs trigger has_document_type
|
||||
if (
|
||||
|
||||
18
src/documents/migrations/1052_warehouse_path.py
Normal file
18
src/documents/migrations/1052_warehouse_path.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.11 on 2024-05-30 07:28

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the optional free-text ``path`` field to Warehouse."""

    dependencies = [
        ("documents", "1051_alter_warehouse_options_warehouse_is_insensitive_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="warehouse",
            name="path",
            field=models.TextField(blank=True, null=True, verbose_name="path"),
        ),
    ]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.11 on 2024-05-30 12:43

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Replace ``Document.warehouses`` with a singular ``warehouse`` FK."""

    dependencies = [
        ("documents", "1052_warehouse_path"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="document",
            name="warehouses",
        ),
        migrations.AddField(
            model_name="document",
            name="warehouse",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="documents",
                to="documents.warehouse",
                verbose_name="warehouse",
            ),
        ),
    ]
|
||||
@@ -144,6 +144,7 @@ class Warehouse(MatchingModel):
|
||||
choices=TYPE_WAREHOUSE,
|
||||
default=WAREHOUSE,)
|
||||
parent_warehouse = models.ForeignKey('self', on_delete=models.CASCADE, null=True, blank=True )
|
||||
path = models.TextField(_("path"), null=True, blank=True)
|
||||
|
||||
class Meta(MatchingModel.Meta):
|
||||
verbose_name = _("warehouse")
|
||||
@@ -177,6 +178,15 @@ class Document(ModelWithOwner):
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_("storage path"),
|
||||
)
|
||||
|
||||
warehouse = models.ForeignKey(
|
||||
Warehouse,
|
||||
blank=True,
|
||||
null=True,
|
||||
related_name="documents",
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_("warehouse"),
|
||||
)
|
||||
|
||||
title = models.CharField(_("title"), max_length=128, blank=True, db_index=True)
|
||||
|
||||
@@ -207,15 +217,6 @@ class Document(ModelWithOwner):
|
||||
verbose_name=_("tags"),
|
||||
)
|
||||
|
||||
warehouses = models.ForeignKey(
|
||||
Warehouse,
|
||||
blank=True,
|
||||
null=True,
|
||||
related_name="documents",
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_("warehouses"),
|
||||
)
|
||||
|
||||
checksum = models.CharField(
|
||||
_("checksum"),
|
||||
max_length=32,
|
||||
|
||||
@@ -428,7 +428,7 @@ class TagsField(serializers.PrimaryKeyRelatedField):
|
||||
def get_queryset(self):
|
||||
return Tag.objects.all()
|
||||
|
||||
class WarehousesField(serializers.PrimaryKeyRelatedField):
|
||||
class WarehouseField(serializers.PrimaryKeyRelatedField):
|
||||
def get_queryset(self):
|
||||
return Warehouse.objects.all()
|
||||
|
||||
@@ -656,7 +656,7 @@ class DocumentSerializer(
|
||||
):
|
||||
correspondent = CorrespondentField(allow_null=True)
|
||||
tags = TagsField(many=True)
|
||||
warehouses = WarehousesField(allow_null=True)
|
||||
warehouse = WarehouseField(allow_null=True)
|
||||
document_type = DocumentTypeField(allow_null=True)
|
||||
storage_path = StoragePathField(allow_null=True)
|
||||
|
||||
@@ -775,10 +775,10 @@ class DocumentSerializer(
|
||||
"correspondent",
|
||||
"document_type",
|
||||
"storage_path",
|
||||
"warehouse",
|
||||
"title",
|
||||
"content",
|
||||
"tags",
|
||||
"warehouses",
|
||||
"created",
|
||||
"created_date",
|
||||
"modified",
|
||||
@@ -882,6 +882,7 @@ class BulkEditSerializer(
|
||||
"set_correspondent",
|
||||
"set_document_type",
|
||||
"set_storage_path",
|
||||
"set_warehouse"
|
||||
"add_tag",
|
||||
"remove_tag",
|
||||
"modify_tags",
|
||||
@@ -916,6 +917,8 @@ class BulkEditSerializer(
|
||||
return bulk_edit.set_document_type
|
||||
elif method == "set_storage_path":
|
||||
return bulk_edit.set_storage_path
|
||||
elif method == "set_warehouse":
|
||||
return bulk_edit.set_warehouse
|
||||
elif method == "add_tag":
|
||||
return bulk_edit.add_tag
|
||||
elif method == "remove_tag":
|
||||
@@ -971,6 +974,17 @@ class BulkEditSerializer(
|
||||
raise serializers.ValidationError("Correspondent does not exist")
|
||||
else:
|
||||
raise serializers.ValidationError("correspondent not specified")
|
||||
def _validate_parameters_warehouse(self, parameters):
    """Validate the ``warehouse`` entry of a set_warehouse bulk edit.

    The key must be present; its value may be None (clears the field)
    or the id of an existing Warehouse. Raises ValidationError otherwise.
    """
    if "warehouse" not in parameters:
        raise serializers.ValidationError("warehouse not specified")

    warehouse_id = parameters["warehouse"]
    if warehouse_id is None:
        # Explicit None means "unset the warehouse" — nothing to look up.
        return

    try:
        Warehouse.objects.get(id=warehouse_id)
    except Warehouse.DoesNotExist:
        raise serializers.ValidationError("Warehouse does not exist")
||||
def _validate_storage_path(self, parameters):
|
||||
if "storage_path" in parameters:
|
||||
@@ -1059,6 +1073,8 @@ class BulkEditSerializer(
|
||||
self._validate_parameters_modify_tags(parameters)
|
||||
elif method == bulk_edit.set_storage_path:
|
||||
self._validate_storage_path(parameters)
|
||||
elif method == bulk_edit.set_warehouse:
|
||||
self._validate_parameters_warehouse(parameters)
|
||||
elif method == bulk_edit.set_permissions:
|
||||
self._validate_parameters_set_permissions(parameters)
|
||||
elif method == bulk_edit.rotate:
|
||||
@@ -1107,6 +1123,14 @@ class PostDocumentSerializer(serializers.Serializer):
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
warehouse = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Warehouse.objects.all(),
|
||||
label="Warehouse",
|
||||
allow_null=True,
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
storage_path = serializers.PrimaryKeyRelatedField(
|
||||
queryset=StoragePath.objects.all(),
|
||||
@@ -1168,6 +1192,12 @@ class PostDocumentSerializer(serializers.Serializer):
|
||||
return storage_path.id
|
||||
else:
|
||||
return None
|
||||
|
||||
def validate_warehouse(self, warehouse):
    """Normalize the submitted warehouse to its primary key (or None)."""
    return warehouse.id if warehouse else None
||||
def validate_tags(self, tags):
|
||||
if tags:
|
||||
@@ -1232,6 +1262,7 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
|
||||
title="title",
|
||||
correspondent="correspondent",
|
||||
document_type="document_type",
|
||||
warehouse="warehouse",
|
||||
created="created",
|
||||
created_year="created_year",
|
||||
created_year_short="created_year_short",
|
||||
@@ -1504,6 +1535,7 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
|
||||
"filter_has_tags",
|
||||
"filter_has_correspondent",
|
||||
"filter_has_document_type",
|
||||
"filter_has_warehouse",
|
||||
]
|
||||
|
||||
def validate(self, attrs):
|
||||
@@ -1540,6 +1572,8 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
assign_tags = TagsField(many=True, allow_null=True, required=False)
|
||||
assign_document_type = DocumentTypeField(allow_null=True, required=False)
|
||||
assign_storage_path = StoragePathField(allow_null=True, required=False)
|
||||
assign_warehouse = WarehouseField(allow_null =True, required=False)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = WorkflowAction
|
||||
@@ -1551,6 +1585,7 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
"assign_correspondent",
|
||||
"assign_document_type",
|
||||
"assign_storage_path",
|
||||
"assign_warehouse"
|
||||
"assign_owner",
|
||||
"assign_view_users",
|
||||
"assign_view_groups",
|
||||
@@ -1565,6 +1600,8 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
"remove_document_types",
|
||||
"remove_all_storage_paths",
|
||||
"remove_storage_paths",
|
||||
"remove_all_warehouses",
|
||||
"remove_warehouses",
|
||||
"remove_custom_fields",
|
||||
"remove_all_custom_fields",
|
||||
"remove_all_owners",
|
||||
@@ -1658,6 +1695,7 @@ class WorkflowSerializer(serializers.ModelSerializer):
|
||||
remove_correspondents = action.pop("remove_correspondents", None)
|
||||
remove_document_types = action.pop("remove_document_types", None)
|
||||
remove_storage_paths = action.pop("remove_storage_paths", None)
|
||||
remove_warehouses = action.pop("remove_warehouses", None)
|
||||
remove_custom_fields = action.pop("remove_custom_fields", None)
|
||||
remove_owners = action.pop("remove_owners", None)
|
||||
remove_view_users = action.pop("remove_view_users", None)
|
||||
@@ -1690,6 +1728,8 @@ class WorkflowSerializer(serializers.ModelSerializer):
|
||||
action_instance.remove_document_types.set(remove_document_types)
|
||||
if remove_storage_paths is not None:
|
||||
action_instance.remove_storage_paths.set(remove_storage_paths)
|
||||
if remove_warehouses is not None:
|
||||
action_instance.remove_warehouses.set(remove_warehouses)
|
||||
if remove_custom_fields is not None:
|
||||
action_instance.remove_custom_fields.set(remove_custom_fields)
|
||||
if remove_owners is not None:
|
||||
@@ -1756,22 +1796,12 @@ class WorkflowSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class WarehouseSerializer(MatchingModelSerializer, OwnedObjectSerializer):
    """Serializer for Warehouse objects.

    Adds a read-only ``document_count`` and expands ``parent_warehouse``
    into a nested representation (None when the warehouse has no parent).
    """

    document_count = serializers.SerializerMethodField()

    def get_document_count(self, obj):
        # Bug fix: the Document.warehouses FK was removed and replaced by
        # Document.warehouse in this change set (migration 1053); filtering
        # on the old field name raises FieldError at runtime.
        return Document.objects.filter(warehouse=obj).count()

    class Meta:
        model = Warehouse
        fields = "__all__"

    def to_representation(self, instance):
        data = super().to_representation(instance)
        if instance.parent_warehouse:
            # Recursively serialize the parent chain.
            data["parent_warehouse"] = WarehouseSerializer(
                instance.parent_warehouse,
            ).data
        else:
            data["parent_warehouse"] = None
        return data
||||
|
||||
|
||||
|
||||
@@ -130,6 +130,59 @@ def set_correspondent(
|
||||
document.correspondent = selected
|
||||
document.save(update_fields=("correspondent",))
|
||||
|
||||
def set_warehouse(
    sender,
    document: Document,
    logging_group=None,
    classifier: Optional[DocumentClassifier] = None,
    replace=False,
    use_first=True,
    suggest=False,
    base_url=None,
    stdout=None,
    style_func=None,
    **kwargs,
):
    """Assign (or merely suggest) a warehouse for a consumed document.

    Mirrors the sibling handlers (set_correspondent, set_document_type):
    collect matching warehouses via the matching module, pick the first
    on ambiguity when ``use_first`` is set, and either print a suggestion
    (``suggest``) or persist the assignment.
    """
    # Respect an existing assignment unless the caller asked to replace it.
    if document.warehouse and not replace:
        return

    potential_warehouses = matching.match_warehouses(document, classifier)

    potential_count = len(potential_warehouses)
    selected = potential_warehouses[0] if potential_warehouses else None
    if potential_count > 1:
        if use_first:
            logger.debug(
                f"Detected {potential_count} potential warehouses, "
                f"so we've opted for {selected}",
                extra={"group": logging_group},
            )
        else:
            # Fix: second fragment had a spurious f-prefix with no
            # placeholders (F541); a plain string is identical at runtime.
            logger.debug(
                f"Detected {potential_count} potential warehouses, "
                "not assigning any warehouse",
                extra={"group": logging_group},
            )
            return

    if selected or replace:
        if suggest:
            # Dry-run mode: print what we would have assigned.
            _suggestion_printer(
                stdout,
                style_func,
                "warehouse",
                document,
                selected,
                base_url,
            )
        else:
            logger.info(
                f"Assigning warehouse {selected} to {document}",
                extra={"group": logging_group},
            )

            document.warehouse = selected
            document.save(update_fields=("warehouse",))
|
||||
def set_document_type(
|
||||
sender,
|
||||
@@ -545,6 +598,7 @@ def run_workflow(
|
||||
.prefetch_related("actions__assign_custom_fields")
|
||||
.prefetch_related("actions__remove_tags")
|
||||
.prefetch_related("actions__remove_correspondents")
|
||||
.prefetch_related("actions__remove_warehouses")
|
||||
.prefetch_related("actions__remove_document_types")
|
||||
.prefetch_related("actions__remove_storage_paths")
|
||||
.prefetch_related("actions__remove_custom_fields")
|
||||
@@ -570,6 +624,9 @@ def run_workflow(
|
||||
|
||||
if action.assign_correspondent is not None:
|
||||
document.correspondent = action.assign_correspondent
|
||||
|
||||
if action.assign_warehouse is not None:
|
||||
document.warehouse = action.assign_warehouse
|
||||
|
||||
if action.assign_document_type is not None:
|
||||
document.document_type = action.assign_document_type
|
||||
@@ -594,6 +651,11 @@ def run_workflow(
|
||||
if document.document_type is not None
|
||||
else ""
|
||||
),
|
||||
(
|
||||
document.warehouse.name
|
||||
if document.warehouse is not None
|
||||
else ""
|
||||
),
|
||||
(
|
||||
document.owner.username
|
||||
if document.owner is not None
|
||||
@@ -692,6 +754,16 @@ def run_workflow(
|
||||
)
|
||||
):
|
||||
document.correspondent = None
|
||||
|
||||
if action.remove_all_warehouses or (
|
||||
document.warehouse
|
||||
and (
|
||||
action.remove_warehouses.filter(
|
||||
pk=document.warehouse.pk,
|
||||
).exists()
|
||||
)
|
||||
):
|
||||
document.warehouse = None
|
||||
|
||||
if action.remove_all_document_types or (
|
||||
document.document_type
|
||||
|
||||
@@ -33,6 +33,7 @@ from documents.models import Correspondent
|
||||
from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Warehouse
|
||||
from documents.models import Tag
|
||||
from documents.parsers import DocumentParser
|
||||
from documents.parsers import get_parser_class_for_mime_type
|
||||
@@ -73,6 +74,7 @@ def train_classifier():
|
||||
not Tag.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
|
||||
and not DocumentType.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
|
||||
and not Correspondent.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
|
||||
and not Warehouse.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
|
||||
and not StoragePath.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
|
||||
):
|
||||
logger.info("No automatic matching items, not training")
|
||||
@@ -170,6 +172,7 @@ def consume_file(
|
||||
override_correspondent_id=overrides.correspondent_id,
|
||||
override_document_type_id=overrides.document_type_id,
|
||||
override_tag_ids=overrides.tag_ids,
|
||||
override_warehouse_id=overrides.warehouse_id,
|
||||
override_storage_path_id=overrides.storage_path_id,
|
||||
override_created=overrides.created,
|
||||
override_asn=overrides.asn,
|
||||
|
||||
@@ -104,6 +104,7 @@ from documents.filters import WarehouseFilterSet
|
||||
from documents.matching import match_correspondents
|
||||
from documents.matching import match_document_types
|
||||
from documents.matching import match_storage_paths
|
||||
from documents.matching import match_warehouses
|
||||
from documents.matching import match_tags
|
||||
from documents.models import Correspondent
|
||||
from documents.models import CustomField
|
||||
@@ -336,7 +337,7 @@ class DocumentViewSet(
|
||||
ObjectOwnedOrGrantedPermissionsFilter,
|
||||
)
|
||||
filterset_class = DocumentFilterSet
|
||||
search_fields = ("title", "correspondent__name", "content", "warehouses")
|
||||
search_fields = ("title", "correspondent__name", "content", "warehouse")
|
||||
ordering_fields = (
|
||||
"id",
|
||||
"title",
|
||||
@@ -354,7 +355,7 @@ class DocumentViewSet(
|
||||
return (
|
||||
Document.objects.distinct()
|
||||
.annotate(num_notes=Count("notes"))
|
||||
.select_related("correspondent", "storage_path", "document_type", "owner")
|
||||
.select_related("correspondent", "storage_path", "document_type","warehouse", "owner")
|
||||
.prefetch_related("tags", "custom_fields", "notes")
|
||||
)
|
||||
|
||||
@@ -529,6 +530,9 @@ class DocumentViewSet(
|
||||
"correspondents": [
|
||||
c.id for c in match_correspondents(doc, classifier, request.user)
|
||||
],
|
||||
"warehouses": [
|
||||
wh.id for wh in match_warehouses(doc, classifier, request.user)
|
||||
],
|
||||
"tags": [t.id for t in match_tags(doc, classifier, request.user)],
|
||||
"document_types": [
|
||||
dt.id for dt in match_document_types(doc, classifier, request.user)
|
||||
@@ -749,6 +753,7 @@ class SearchResultSerializer(DocumentSerializer, PassUserMixin):
|
||||
"correspondent",
|
||||
"storage_path",
|
||||
"document_type",
|
||||
"warehouse"
|
||||
"owner",
|
||||
)
|
||||
.prefetch_related("tags", "custom_fields", "notes")
|
||||
@@ -937,6 +942,7 @@ class PostDocumentView(GenericAPIView):
|
||||
correspondent_id = serializer.validated_data.get("correspondent")
|
||||
document_type_id = serializer.validated_data.get("document_type")
|
||||
storage_path_id = serializer.validated_data.get("storage_path")
|
||||
warehouse_id = serializer.validated_data.get("warehouse")
|
||||
tag_ids = serializer.validated_data.get("tags")
|
||||
title = serializer.validated_data.get("title")
|
||||
created = serializer.validated_data.get("created")
|
||||
@@ -965,6 +971,7 @@ class PostDocumentView(GenericAPIView):
|
||||
correspondent_id=correspondent_id,
|
||||
document_type_id=document_type_id,
|
||||
storage_path_id=storage_path_id,
|
||||
warehouse_id=warehouse_id,
|
||||
tag_ids=tag_ids,
|
||||
created=created,
|
||||
asn=archive_serial_number,
|
||||
@@ -1014,6 +1021,12 @@ class SelectionDataView(GenericAPIView):
|
||||
Case(When(documents__id__in=ids, then=1), output_field=IntegerField()),
|
||||
),
|
||||
)
|
||||
|
||||
warehouses = Warehouse.objects.annotate(
|
||||
document_count=Count(
|
||||
Case(When(documents__id__in=ids, then=1), output_field=IntegerField()),
|
||||
),
|
||||
)
|
||||
|
||||
r = Response(
|
||||
{
|
||||
@@ -1027,6 +1040,9 @@ class SelectionDataView(GenericAPIView):
|
||||
"selected_document_types": [
|
||||
{"id": t.id, "document_count": t.document_count} for t in types
|
||||
],
|
||||
"selected_warehouses": [
|
||||
{"id": t.id, "document_count": t.document_count} for t in warehouses
|
||||
],
|
||||
"selected_storage_paths": [
|
||||
{"id": t.id, "document_count": t.document_count}
|
||||
for t in storage_paths
|
||||
@@ -1111,6 +1127,17 @@ class StatisticsView(APIView):
|
||||
),
|
||||
)
|
||||
)
|
||||
warehouse_count = (
|
||||
Warehouse.objects.count()
|
||||
if user is None
|
||||
else len(
|
||||
get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_warehouse",
|
||||
Warehouse,
|
||||
),
|
||||
)
|
||||
)
|
||||
storage_path_count = (
|
||||
StoragePath.objects.count()
|
||||
if user is None
|
||||
@@ -1160,6 +1187,7 @@ class StatisticsView(APIView):
|
||||
"correspondent_count": correspondent_count,
|
||||
"document_type_count": document_type_count,
|
||||
"storage_path_count": storage_path_count,
|
||||
"warehouse_count": warehouse_count,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -1531,18 +1559,18 @@ class BulkEditObjectsView(PassUserMixin):
|
||||
|
||||
if warehouse.type == Warehouse.SHELF:
|
||||
boxcases = Warehouse.objects.filter(parent_warehouse=warehouse)
|
||||
documents = Document.objects.filter(warehouses__in=[b.id for b in boxcases])
|
||||
documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
|
||||
documents.delete()
|
||||
boxcases.delete()
|
||||
warehouse.delete()
|
||||
if warehouse.type == Warehouse.BOXCASE:
|
||||
documents = Document.objects.filter(warehouses=warehouse)
|
||||
documents = Document.objects.filter(warehouse=warehouse)
|
||||
documents.delete()
|
||||
warehouse.delete()
|
||||
if warehouse.type == Warehouse.WAREHOUSE:
|
||||
shelves = Warehouse.objects.filter(parent_warehouse=warehouse)
|
||||
boxcases = Warehouse.objects.filter(parent_warehouse__in=[s.id for s in shelves])
|
||||
documents = Document.objects.filter(warehouses__in=[b.id for b in boxcases])
|
||||
documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
|
||||
documents.delete()
|
||||
boxcases.delete()
|
||||
shelves.delete()
|
||||
@@ -1718,6 +1746,9 @@ class SystemStatusView(PassUserMixin):
|
||||
or Correspondent.objects.filter(
|
||||
matching_algorithm=Tag.MATCH_AUTO,
|
||||
).exists()
|
||||
or Warehouse.objects.filter(
|
||||
matching_algorithm=Tag.MATCH_AUTO,
|
||||
).exists()
|
||||
or StoragePath.objects.filter(
|
||||
matching_algorithm=Tag.MATCH_AUTO,
|
||||
).exists()
|
||||
@@ -1812,31 +1843,10 @@ class WarehouseViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
|
||||
def create(self, request, *args, **kwargs):
|
||||
# try:
|
||||
serializer = WarehouseSerializer(data=request.data)
|
||||
name = None
|
||||
type = None
|
||||
parent_warehouse = None
|
||||
if serializer.is_valid(raise_exception=True):
|
||||
name = serializer.validated_data.get("name", "")
|
||||
type = serializer.validated_data.get("type", Warehouse.WAREHOUSE)
|
||||
parent_warehouse = serializer.validated_data.get('parent_warehouse',None)
|
||||
# check_warehouse = Warehouse.objects.filter(
|
||||
# name = name,
|
||||
# type = type,
|
||||
# parent_warehouse=parent_warehouse
|
||||
# )
|
||||
|
||||
# if check_warehouse:
|
||||
# return Response({'status':400,
|
||||
# 'message':'created fail'},status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# if type == Warehouse.SHELF and parent_warehouse == None:
|
||||
# return Response({'status': 400,
|
||||
# 'message': 'parent_warehouse is required for Shelf type'}, status=status.HTTP_400_BAD_REQUEST)
|
||||
# elif type == Warehouse.BOXCASE and parent_warehouse == None:
|
||||
# return Response({'status': 400,
|
||||
# 'message': 'parent_warehouse is required for Boxcase type'}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# if serializer.is_valid(raise_exception=True):
|
||||
|
||||
parent_warehouse = Warehouse.objects.filter(id=parent_warehouse.id if parent_warehouse else 0).first()
|
||||
|
||||
if serializer.validated_data.get("type") == Warehouse.WAREHOUSE and not parent_warehouse:
|
||||
@@ -1853,7 +1863,27 @@ class WarehouseViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
|
||||
'message':'created successfully',
|
||||
'data':serializer.data},status=status.HTTP_201_CREATED)
|
||||
|
||||
# except Exception as e:
|
||||
# return Response({'status':400,
|
||||
# 'message':e},status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def destroy(self, request, pk, *args, **kwargs):
|
||||
warehouse = Warehouse.objects.get(id=pk)
|
||||
if warehouse.type == Warehouse.SHELF:
|
||||
boxcases = Warehouse.objects.filter(parent_warehouse=warehouse)
|
||||
documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
|
||||
documents.delete()
|
||||
boxcases.delete()
|
||||
warehouse.delete()
|
||||
if warehouse.type == Warehouse.BOXCASE:
|
||||
documents = Document.objects.filter(warehouse=warehouse)
|
||||
documents.delete()
|
||||
warehouse.delete()
|
||||
if warehouse.type == Warehouse.WAREHOUSE:
|
||||
shelves = Warehouse.objects.filter(parent_warehouse=warehouse)
|
||||
boxcases = Warehouse.objects.filter(parent_warehouse__in=[s.id for s in shelves])
|
||||
documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
|
||||
documents.delete()
|
||||
boxcases.delete()
|
||||
shelves.delete()
|
||||
warehouse.delete()
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user