diff --git a/src-ui/src/app/components/document-list/document-list.component.html b/src-ui/src/app/components/document-list/document-list.component.html
index bb9513559..350333887 100644
--- a/src-ui/src/app/components/document-list/document-list.component.html
+++ b/src-ui/src/app/components/document-list/document-list.component.html
@@ -125,7 +125,7 @@
@if (displayMode === 'largeCards') {
@for (d of list.documents; track trackByDocumentId($index, d)) {
-
+
}
@@ -271,8 +271,8 @@
}
@if (permissionService.currentUserCan(PermissionAction.View, PermissionType.Warehouse)) {
- @if (d.warehouses) {
- {{(d.warehouses$ | async)?.name}}
+ @if (d.warehouse) {
+ {{(d.warehouse$ | async)?.name}}
}
|
}
diff --git a/src-ui/src/app/components/document-list/filter-editor/filter-editor.component.spec.ts b/src-ui/src/app/components/document-list/filter-editor/filter-editor.component.spec.ts
index 41ed857fd..5f522e5a2 100644
--- a/src-ui/src/app/components/document-list/filter-editor/filter-editor.component.spec.ts
+++ b/src-ui/src/app/components/document-list/filter-editor/filter-editor.component.spec.ts
@@ -263,6 +263,9 @@ describe('FilterEditorComponent', () => {
httpTestingController.expectNone(
`${environment.apiBaseUrl}documents/storage_paths/`
)
+ httpTestingController.expectNone(
+ `${environment.apiBaseUrl}documents/warehouses/`
+ )
})
// SET filterRules
@@ -1807,6 +1810,10 @@ describe('FilterEditorComponent', () => {
{ id: 32, document_count: 1 },
{ id: 33, document_count: 0 },
],
+ selected_warehouses: [
+ { id: 42, document_count: 1 },
+ { id: 43, document_count: 0 },
+ ],
}
})
@@ -1865,6 +1872,24 @@ describe('FilterEditorComponent', () => {
]
expect(component.generateFilterName()).toEqual('Without storage path')
+ component.filterRules = [
+ {
+ rule_type: FILTER_HAS_WAREHOUSE_ANY,
+ value: '42',
+ },
+ ]
+ expect(component.generateFilterName()).toEqual(
+ `Warehouse path: ${warehouses[0].name}`
+ )
+
+ component.filterRules = [
+ {
+ rule_type: FILTER_WAREHOUSE,
+ value: null,
+ },
+ ]
+ expect(component.generateFilterName()).toEqual('Without warehouse')
+
component.filterRules = [
{
rule_type: FILTER_HAS_TAGS_ALL,
diff --git a/src-ui/src/app/components/manage/warehouse-list/warehouse-list.component.ts b/src-ui/src/app/components/manage/warehouse-list/warehouse-list.component.ts
index b721fc7ac..c377f8013 100644
--- a/src-ui/src/app/components/manage/warehouse-list/warehouse-list.component.ts
+++ b/src-ui/src/app/components/manage/warehouse-list/warehouse-list.component.ts
@@ -1,6 +1,6 @@
import { Component } from '@angular/core'
import { NgbModal } from '@ng-bootstrap/ng-bootstrap'
-import { FILTER_HAS_TAGS_ALL } from 'src/app/data/filter-rule-type'
+import { FILTER_HAS_WAREHOUSE_ANY } from 'src/app/data/filter-rule-type'
import { Warehouse } from 'src/app/data/warehouse'
import { DocumentListViewService } from 'src/app/services/document-list-view.service'
import {
@@ -32,7 +32,7 @@ export class WarehouseListComponent extends ManagementListComponent
{
toastService,
documentListViewService,
permissionsService,
- FILTER_HAS_TAGS_ALL,
+ FILTER_HAS_WAREHOUSE_ANY,
$localize`warehouse`,
$localize`warehouses`,
PermissionType.Warehouse,
diff --git a/src-ui/src/app/data/document.ts b/src-ui/src/app/data/document.ts
index 4b249fc7e..acb6db8d4 100644
--- a/src-ui/src/app/data/document.ts
+++ b/src-ui/src/app/data/document.ts
@@ -29,9 +29,9 @@ export interface Document extends ObjectWithPermissions {
storage_path?: number
-  warehouses$?: Observable<Warehouse>
+  warehouse$?: Observable<Warehouse>
-  warehouses?: number
+  warehouse?: number
title?: string
diff --git a/src-ui/src/app/data/filter-rule-type.ts b/src-ui/src/app/data/filter-rule-type.ts
index 4accfa68c..f6821c1ec 100644
--- a/src-ui/src/app/data/filter-rule-type.ts
+++ b/src-ui/src/app/data/filter-rule-type.ts
@@ -114,20 +114,20 @@ export const FILTER_RULE_TYPES: FilterRuleType[] = [
},
{
id: FILTER_WAREHOUSE,
- filtervar: 'warehouses__id',
- isnull_filtervar: 'warehouses__isnull',
+ filtervar: 'warehouse__id',
+ isnull_filtervar: 'warehouse__isnull',
datatype: 'warehouse',
multi: false,
},
{
id: FILTER_HAS_WAREHOUSE_ANY,
- filtervar: 'warehouses__id__in',
+ filtervar: 'warehouse__id__in',
datatype: 'warehouse',
multi: true,
},
{
id: FILTER_DOES_NOT_HAVE_WAREHOUSE,
- filtervar: 'warehouses__id__none',
+ filtervar: 'warehouse__id__none',
datatype: 'warehouse',
multi: true,
},
diff --git a/src-ui/src/app/data/mail-rule.ts b/src-ui/src/app/data/mail-rule.ts
index 2611fa3ba..1f9a82757 100644
--- a/src-ui/src/app/data/mail-rule.ts
+++ b/src-ui/src/app/data/mail-rule.ts
@@ -67,6 +67,8 @@ export interface MailRule extends ObjectWithPermissions {
assign_document_type?: number // PaperlessDocumentType.id
+ assign_warehouse?: number // PaperlessWarehouse.id
+
assign_correspondent_from?: MailMetadataCorrespondentOption
assign_correspondent?: number // PaperlessCorrespondent.id
diff --git a/src-ui/src/app/data/warehouse.ts b/src-ui/src/app/data/warehouse.ts
index 27b936df0..2e6141055 100644
--- a/src-ui/src/app/data/warehouse.ts
+++ b/src-ui/src/app/data/warehouse.ts
@@ -5,4 +5,6 @@ export interface Warehouse extends MatchingModel {
type?: string
parent_warehouse?: number
+
+ path?: string
}
diff --git a/src-ui/src/app/data/workflow-action.ts b/src-ui/src/app/data/workflow-action.ts
index 77918c96c..58d284d6c 100644
--- a/src-ui/src/app/data/workflow-action.ts
+++ b/src-ui/src/app/data/workflow-action.ts
@@ -17,7 +17,7 @@ export interface WorkflowAction extends ObjectWithId {
assign_storage_path?: number // StoragePath.id
- assign_warehouses?: number // Warehouse.id
+ assign_warehouse?: number // Warehouse.id
assign_owner?: number // User.id
diff --git a/src-ui/src/app/data/workflow-trigger.ts b/src-ui/src/app/data/workflow-trigger.ts
index 3e3bf8cf8..bd754eb22 100644
--- a/src-ui/src/app/data/workflow-trigger.ts
+++ b/src-ui/src/app/data/workflow-trigger.ts
@@ -34,4 +34,7 @@ export interface WorkflowTrigger extends ObjectWithId {
filter_has_correspondent?: number // Correspondent.id
filter_has_document_type?: number // DocumentType.id
+
+ filter_has_warehouse?: number // Warehouse.id
+
}
diff --git a/src-ui/src/app/services/document-list-view.service.spec.ts b/src-ui/src/app/services/document-list-view.service.spec.ts
index afbe83175..b538b2c3f 100644
--- a/src-ui/src/app/services/document-list-view.service.spec.ts
+++ b/src-ui/src/app/services/document-list-view.service.spec.ts
@@ -29,6 +29,7 @@ const documents = [
correspondent: 11,
document_type: 3,
storage_path: 8,
+ warehouse: 14,
},
{
id: 2,
diff --git a/src-ui/src/app/services/open-documents.service.spec.ts b/src-ui/src/app/services/open-documents.service.spec.ts
index 09341da62..abc4da80b 100644
--- a/src-ui/src/app/services/open-documents.service.spec.ts
+++ b/src-ui/src/app/services/open-documents.service.spec.ts
@@ -20,6 +20,7 @@ const documents = [
correspondent: 11,
document_type: 3,
storage_path: 8,
+ warehouse: 14,
},
{
id: 2,
diff --git a/src-ui/src/app/services/permissions.service.spec.ts b/src-ui/src/app/services/permissions.service.spec.ts
index 9f4929f6d..9a469ff38 100644
--- a/src-ui/src/app/services/permissions.service.spec.ts
+++ b/src-ui/src/app/services/permissions.service.spec.ts
@@ -138,6 +138,7 @@ describe('PermissionsService', () => {
'view_savedview',
'view_uisettings',
'delete_storagepath',
+ 'delete_warehouse',
'delete_frontendsettings',
'change_paperlesstask',
'view_taskresult',
@@ -185,6 +186,7 @@ describe('PermissionsService', () => {
'delete_document',
'change_uisettings',
'change_storagepath',
+ 'change_warehouse',
'change_document',
'delete_tokenproxy',
'change_note',
@@ -210,6 +212,7 @@ describe('PermissionsService', () => {
'change_tag',
'change_chordcounter',
'add_storagepath',
+ 'add_warehouse',
'delete_group',
'add_taskattributes',
'delete_mailaccount',
@@ -240,6 +243,7 @@ describe('PermissionsService', () => {
'delete_taskresult',
'view_contenttype',
'view_storagepath',
+ 'view_warehouse',
'add_permission',
'change_userobjectpermission',
'delete_savedviewfilterrule',
diff --git a/src-ui/src/app/services/rest/document.service.spec.ts b/src-ui/src/app/services/rest/document.service.spec.ts
index 1f3ccc0af..010e830fd 100644
--- a/src-ui/src/app/services/rest/document.service.spec.ts
+++ b/src-ui/src/app/services/rest/document.service.spec.ts
@@ -24,6 +24,7 @@ const documents = [
correspondent: 11,
document_type: 3,
storage_path: 8,
+ warehouse: 14,
},
{
id: 2,
@@ -225,6 +226,7 @@ describe(`DocumentService`, () => {
expect(doc.document_type$).not.toBeNull()
expect(doc.tags$).not.toBeNull()
expect(doc.storage_path$).not.toBeNull()
+ expect(doc.warehouse$).not.toBeNull()
})
httpTestingController
.expectOne(
diff --git a/src-ui/src/app/services/rest/document.service.ts b/src-ui/src/app/services/rest/document.service.ts
index 3c4f3cf8a..ca5c333e8 100644
--- a/src-ui/src/app/services/rest/document.service.ts
+++ b/src-ui/src/app/services/rest/document.service.ts
@@ -28,7 +28,7 @@ export const DOCUMENT_SORT_FIELDS = [
{ field: 'correspondent__name', name: $localize`Correspondent` },
{ field: 'title', name: $localize`Title` },
{ field: 'document_type__name', name: $localize`Document type` },
- { field: 'warehouses__name', name: $localize`Warehouse` },
+ { field: 'warehouse__name', name: $localize`Warehouse` },
{ field: 'created', name: $localize`Created` },
{ field: 'added', name: $localize`Added` },
{ field: 'modified', name: $localize`Modified` },
@@ -123,13 +123,13 @@ export class DocumentService extends AbstractPaperlessService {
doc.storage_path$ = this.storagePathService.getCached(doc.storage_path)
}
if (
- doc.warehouses &&
+ doc.warehouse &&
this.permissionsService.currentUserCan(
PermissionAction.View,
PermissionType.Warehouse
)
) {
- doc.warehouses$ = this.warehouseService.getCached(doc.warehouses)
+ doc.warehouse$ = this.warehouseService.getCached(doc.warehouse)
}
return doc
}
diff --git a/src-ui/src/app/services/rest/group.service.spec.ts b/src-ui/src/app/services/rest/group.service.spec.ts
index 98183b589..ac8c1a9b8 100644
--- a/src-ui/src/app/services/rest/group.service.spec.ts
+++ b/src-ui/src/app/services/rest/group.service.spec.ts
@@ -44,6 +44,7 @@ const group = {
'view_savedview',
'view_uisettings',
'delete_storagepath',
+ 'delete_warehouse',
'delete_frontendsettings',
'change_paperlesstask',
'view_taskresult',
@@ -91,6 +92,7 @@ const group = {
'delete_document',
'change_uisettings',
'change_storagepath',
+ 'change_warehouse',
'change_document',
'delete_tokenproxy',
'change_note',
@@ -116,6 +118,7 @@ const group = {
'change_tag',
'change_chordcounter',
'add_storagepath',
+ 'add_warehouse',
'delete_group',
'add_taskattributes',
'delete_mailaccount',
@@ -146,6 +149,7 @@ const group = {
'delete_taskresult',
'view_contenttype',
'view_storagepath',
+ 'view_warehouse',
'add_permission',
'change_userobjectpermission',
'delete_savedviewfilterrule',
diff --git a/src/documents/admin.py b/src/documents/admin.py
index 5df235618..46a726400 100644
--- a/src/documents/admin.py
+++ b/src/documents/admin.py
@@ -13,6 +13,7 @@ from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import StoragePath
 from documents.models import Tag
+from documents.models import Warehouse
if settings.AUDIT_LOG_ENABLED:
@@ -38,6 +39,10 @@ class DocumentTypeAdmin(GuardedModelAdmin):
list_filter = ("matching_algorithm",)
list_editable = ("match", "matching_algorithm")
+class WarehouseAdmin(GuardedModelAdmin):
+ list_display = ("name", "type", "path", "parent_warehouse", "match", "matching_algorithm")
+ list_filter = ("matching_algorithm",)
+ list_editable = ("match", "matching_algorithm")
class DocumentAdmin(GuardedModelAdmin):
search_fields = ("correspondent__name", "title", "content", "tags__name")
@@ -188,6 +193,7 @@ class CustomFieldInstancesAdmin(GuardedModelAdmin):
admin.site.register(Correspondent, CorrespondentAdmin)
admin.site.register(Tag, TagAdmin)
admin.site.register(DocumentType, DocumentTypeAdmin)
+admin.site.register(Warehouse, WarehouseAdmin)
admin.site.register(Document, DocumentAdmin)
admin.site.register(SavedView, SavedViewAdmin)
admin.site.register(StoragePath, StoragePathAdmin)
diff --git a/src/documents/apps.py b/src/documents/apps.py
index 7ed006d06..3a3a429f8 100644
--- a/src/documents/apps.py
+++ b/src/documents/apps.py
@@ -15,6 +15,7 @@ class DocumentsConfig(AppConfig):
from documents.signals.handlers import run_workflow_added
from documents.signals.handlers import run_workflow_updated
from documents.signals.handlers import set_correspondent
+ from documents.signals.handlers import set_warehouse
from documents.signals.handlers import set_document_type
from documents.signals.handlers import set_log_entry
from documents.signals.handlers import set_storage_path
@@ -22,6 +23,7 @@ class DocumentsConfig(AppConfig):
document_consumption_finished.connect(add_inbox_tags)
document_consumption_finished.connect(set_correspondent)
+ document_consumption_finished.connect(set_warehouse)
document_consumption_finished.connect(set_document_type)
document_consumption_finished.connect(set_tags)
document_consumption_finished.connect(set_storage_path)
diff --git a/src/documents/bulk_edit.py b/src/documents/bulk_edit.py
index 362c28e20..31b309c87 100644
--- a/src/documents/bulk_edit.py
+++ b/src/documents/bulk_edit.py
@@ -15,6 +15,7 @@ from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
+from documents.models import Warehouse
from documents.permissions import set_permissions_for_object
from documents.tasks import bulk_update_documents
from documents.tasks import consume_file
@@ -52,6 +53,22 @@ def set_storage_path(doc_ids, storage_path):
return "OK"
+def set_warehouse(doc_ids, warehouse):
+ if warehouse:
+ warehouse = Warehouse.objects.get(id=warehouse)
+
+ qs = Document.objects.filter(
+ Q(id__in=doc_ids) & ~Q(warehouse=warehouse),
+ )
+ affected_docs = [doc.id for doc in qs]
+ qs.update(warehouse=warehouse)
+
+ bulk_update_documents.delay(
+ document_ids=affected_docs,
+ )
+
+ return "OK"
+
def set_document_type(doc_ids, document_type):
if document_type:
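
The new `set_warehouse` bulk action mirrors `set_storage_path`: it takes a list of document ids plus a warehouse primary key (or `None` to clear the field) and queues `bulk_update_documents` for the affected ids. A minimal usage sketch, assuming at least one `Warehouse` exists and a worker is running; the ids are placeholders:

```python
# Sketch only: calling the set_warehouse action added in bulk_edit.py.
from documents import bulk_edit
from documents.models import Document, Warehouse

warehouse = Warehouse.objects.first()  # assumes a warehouse has been created
doc_ids = list(Document.objects.values_list("id", flat=True)[:10])

bulk_edit.set_warehouse(doc_ids, warehouse.id)  # assign the warehouse
bulk_edit.set_warehouse(doc_ids, None)          # clear it again
```
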
diff --git a/src/documents/classifier.py b/src/documents/classifier.py
index b3787abab..02fbcda98 100644
--- a/src/documents/classifier.py
+++ b/src/documents/classifier.py
@@ -86,6 +86,7 @@ class DocumentClassifier:
self.tags_binarizer = None
self.tags_classifier = None
self.correspondent_classifier = None
+ self.warehouse_classifier = None
self.document_type_classifier = None
self.storage_path_classifier = None
@@ -112,6 +113,7 @@ class DocumentClassifier:
self.tags_classifier = pickle.load(f)
self.correspondent_classifier = pickle.load(f)
+ self.warehouse_classifier = pickle.load(f)
self.document_type_classifier = pickle.load(f)
self.storage_path_classifier = pickle.load(f)
except Exception as err:
@@ -148,6 +150,7 @@ class DocumentClassifier:
pickle.dump(self.tags_classifier, f)
pickle.dump(self.correspondent_classifier, f)
+ pickle.dump(self.warehouse_classifier, f)
pickle.dump(self.document_type_classifier, f)
pickle.dump(self.storage_path_classifier, f)
@@ -165,6 +168,7 @@ class DocumentClassifier:
labels_tags = []
labels_correspondent = []
+ labels_warehouse = []
labels_document_type = []
labels_storage_path = []
@@ -185,6 +189,13 @@ class DocumentClassifier:
y = cor.pk
hasher.update(y.to_bytes(4, "little", signed=True))
labels_correspondent.append(y)
+
+ y = -1
+ wh = doc.warehouse
+ if wh and wh.matching_algorithm == MatchingModel.MATCH_AUTO:
+ y = wh.pk
+ hasher.update(y.to_bytes(4, "little", signed=True))
+ labels_warehouse.append(y)
tags = sorted(
tag.pk
@@ -234,10 +245,11 @@ class DocumentClassifier:
# it usually is.
num_correspondents = len(set(labels_correspondent) | {-1}) - 1
num_document_types = len(set(labels_document_type) | {-1}) - 1
+ num_warehouses = len(set(labels_warehouse) | {-1}) - 1
num_storage_paths = len(set(labels_storage_path) | {-1}) - 1
logger.debug(
- f"{docs_queryset.count()} documents, {num_tags} tag(s), {num_correspondents} correspondent(s), "
+            f"{docs_queryset.count()} documents, {num_tags} tag(s), {num_correspondents} correspondent(s), {num_warehouses} warehouse(s), "
f"{num_document_types} document type(s). {num_storage_paths} storage path(es)",
)
@@ -304,6 +316,17 @@ class DocumentClassifier:
"classifier.",
)
+ if num_warehouses > 0:
+ logger.debug("Training warehouse classifier...")
+ self.warehouse_classifier = MLPClassifier(tol=0.01)
+ self.warehouse_classifier.fit(data_vectorized, labels_warehouse)
+ else:
+ self.warehouse_classifier = None
+ logger.debug(
+ "There are no warehouses. Not training warehouse "
+ "classifier.",
+ )
+
if num_document_types > 0:
logger.debug("Training document type classifier...")
self.document_type_classifier = MLPClassifier(tol=0.01)
@@ -414,6 +437,17 @@ class DocumentClassifier:
return None
else:
return None
+
+ def predict_warehouse(self, content: str) -> Optional[int]:
+ if self.warehouse_classifier:
+ X = self.data_vectorizer.transform([self.preprocess_content(content)])
+ warehouse_id = self.warehouse_classifier.predict(X)
+ if warehouse_id != -1:
+ return warehouse_id
+ else:
+ return None
+ else:
+ return None
def predict_document_type(self, content: str) -> Optional[int]:
if self.document_type_classifier:
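
Because `warehouse_classifier` is pickled between the correspondent and document-type classifiers, the on-disk model layout changes; previously saved model files will no longer deserialize in the expected order, and the existing exception handling around loading should force a retrain. A short consumption sketch, mirroring the existing predict helpers; `content` stands in for a document's extracted text:

```python
# Sketch (not part of the patch): using the new warehouse prediction.
from documents.classifier import load_classifier

classifier = load_classifier()  # returns None when no trained model exists
if classifier is not None:
    warehouse_pk = classifier.predict_warehouse(content)
    if warehouse_pk is not None:
        print(f"auto-matched warehouse id {warehouse_pk}")
```
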
diff --git a/src/documents/consumer.py b/src/documents/consumer.py
index fa3bf2e75..2a805d828 100644
--- a/src/documents/consumer.py
+++ b/src/documents/consumer.py
@@ -32,6 +32,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import FileInfo
from documents.models import StoragePath
 from documents.models import Tag
+from documents.models import Warehouse
from documents.models import Workflow
from documents.models import WorkflowAction
@@ -76,6 +77,7 @@ class WorkflowTriggerPlugin(
.prefetch_related("actions__assign_custom_fields")
.prefetch_related("actions__remove_tags")
.prefetch_related("actions__remove_correspondents")
+ .prefetch_related("actions__remove_warehouses")
.prefetch_related("actions__remove_document_types")
.prefetch_related("actions__remove_storage_paths")
.prefetch_related("actions__remove_custom_fields")
@@ -110,6 +112,10 @@ class WorkflowTriggerPlugin(
action_overrides.document_type_id = (
action.assign_document_type.pk
)
+ if action.assign_warehouse is not None:
+ action_overrides.warehouse_id = (
+ action.assign_warehouse.pk
+ )
if action.assign_storage_path is not None:
action_overrides.storage_path_id = (
action.assign_storage_path.pk
@@ -298,6 +304,7 @@ class Consumer(LoggingMixin):
self.filename = None
self.override_title = None
self.override_correspondent_id = None
+ self.override_warehouse_id = None
self.override_tag_ids = None
self.override_document_type_id = None
self.override_asn = None
@@ -494,6 +501,7 @@ class Consumer(LoggingMixin):
override_correspondent_id=None,
override_document_type_id=None,
override_tag_ids=None,
+ override_warehouse_id=None,
override_storage_path_id=None,
task_id=None,
override_created=None,
@@ -515,6 +523,7 @@ class Consumer(LoggingMixin):
self.override_correspondent_id = override_correspondent_id
self.override_document_type_id = override_document_type_id
self.override_tag_ids = override_tag_ids
+ self.override_warehouse_id = override_warehouse_id
self.override_storage_path_id = override_storage_path_id
self.task_id = task_id or str(uuid.uuid4())
self.override_created = override_created
@@ -873,6 +882,11 @@ class Consumer(LoggingMixin):
document.storage_path = StoragePath.objects.get(
pk=self.override_storage_path_id,
)
+
+ if self.override_warehouse_id:
+ document.warehouse = Warehouse.objects.get(
+ pk=self.override_warehouse_id,
+ )
if self.override_asn:
document.archive_serial_number = self.override_asn
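
The upload path now threads a warehouse override from `PostDocumentSerializer` through `DocumentMetadataOverrides.warehouse_id` into `Consumer.override_warehouse_id`. An illustrative upload against the existing post_document endpoint; host, token and the warehouse id are placeholders:

```python
# Illustrative only: uploading a file with the new `warehouse` field set.
import requests

with open("invoice.pdf", "rb") as fh:
    requests.post(
        "http://localhost:8000/api/documents/post_document/",
        files={"document": fh},
        data={"title": "Invoice 42", "warehouse": 3},
        headers={"Authorization": "Token <api-token>"},
    )
```
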
diff --git a/src/documents/data_models.py b/src/documents/data_models.py
index b99c8511d..b2a31fd54 100644
--- a/src/documents/data_models.py
+++ b/src/documents/data_models.py
@@ -16,13 +16,14 @@ class DocumentMetadataOverrides:
be set from content or matching. All fields default to None,
meaning no override is happening
"""
-
+
filename: Optional[str] = None
title: Optional[str] = None
correspondent_id: Optional[int] = None
document_type_id: Optional[int] = None
tag_ids: Optional[list[int]] = None
storage_path_id: Optional[int] = None
+ warehouse_id: Optional[int] = None
created: Optional[datetime.datetime] = None
asn: Optional[int] = None
owner_id: Optional[int] = None
@@ -48,6 +49,8 @@ class DocumentMetadataOverrides:
self.document_type_id = other.document_type_id
if other.storage_path_id is not None:
self.storage_path_id = other.storage_path_id
+ if other.warehouse_id is not None:
+ self.warehouse_id = other.warehouse_id
if other.owner_id is not None:
self.owner_id = other.owner_id
@@ -100,6 +103,7 @@ class DocumentMetadataOverrides:
overrides.correspondent_id = doc.correspondent.id if doc.correspondent else None
overrides.document_type_id = doc.document_type.id if doc.document_type else None
overrides.storage_path_id = doc.storage_path.id if doc.storage_path else None
+ overrides.warehouse_id = doc.warehouse.id if doc.warehouse else None
overrides.owner_id = doc.owner.id if doc.owner else None
overrides.tag_ids = list(doc.tags.values_list("id", flat=True))
diff --git a/src/documents/file_handling.py b/src/documents/file_handling.py
index 700a16d8b..9daa64de9 100644
--- a/src/documents/file_handling.py
+++ b/src/documents/file_handling.py
@@ -174,6 +174,14 @@ def generate_filename(
)
else:
document_type = no_value_default
+
+ if doc.warehouse:
+ warehouse = pathvalidate.sanitize_filename(
+ doc.warehouse.name,
+ replacement_text="-",
+ )
+ else:
+ warehouse = no_value_default
if doc.archive_serial_number:
asn = str(doc.archive_serial_number)
@@ -199,6 +207,7 @@ def generate_filename(
title=pathvalidate.sanitize_filename(doc.title, replacement_text="-"),
correspondent=correspondent,
document_type=document_type,
+ warehouse=warehouse,
created=local_created.isoformat(),
created_year=local_created.strftime("%Y"),
created_year_short=local_created.strftime("%y"),
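
`generate_filename()` now exposes a `{warehouse}` placeholder (the sanitized warehouse name, or the no-value default when unset), so it can be used in the filename format like `{correspondent}`. A minimal sketch, assuming the format below is configured, the document has no storage-path override, and `doc` already exists:

```python
# Minimal sketch of the new placeholder; names and values are illustrative.
from django.test import override_settings
from documents.file_handling import generate_filename

with override_settings(FILENAME_FORMAT="{warehouse}/{created_year}/{title}"):
    print(generate_filename(doc))  # e.g. "Main-Depot/2024/Invoice 0042.pdf"
```
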
diff --git a/src/documents/filters.py b/src/documents/filters.py
index bf55db3ec..771d5e784 100644
--- a/src/documents/filters.py
+++ b/src/documents/filters.py
@@ -192,7 +192,7 @@ class DocumentFilterSet(FilterSet):
storage_path__id__none = ObjectFilter(field_name="storage_path", exclude=True)
- warehouses__id__none = ObjectFilter(field_name="warehouses", exclude=True)
+ warehouse__id__none = ObjectFilter(field_name="warehouse", exclude=True)
is_in_inbox = InboxFilter()
@@ -227,9 +227,9 @@ class DocumentFilterSet(FilterSet):
"storage_path": ["isnull"],
"storage_path__id": ID_KWARGS,
"storage_path__name": CHAR_KWARGS,
- "warehouses": ["isnull"],
- "warehouses__id": ID_KWARGS,
- "warehouses__name": CHAR_KWARGS,
+ "warehouse": ["isnull"],
+ "warehouse__id": ID_KWARGS,
+ "warehouse__name": CHAR_KWARGS,
"owner": ["isnull"],
"owner__id": ID_KWARGS,
"custom_fields": ["icontains"],
diff --git a/src/documents/index.py b/src/documents/index.py
index 71b9b8097..b699243d5 100644
--- a/src/documents/index.py
+++ b/src/documents/index.py
@@ -60,6 +60,9 @@ def get_schema():
type=TEXT(sortable=True),
type_id=NUMERIC(),
has_type=BOOLEAN(),
+ warehouse=TEXT(sortable=True),
+ warehouse_id=NUMERIC(),
+ has_warehouse=BOOLEAN(),
created=DATETIME(sortable=True),
modified=DATETIME(sortable=True),
added=DATETIME(sortable=True),
@@ -155,6 +158,9 @@ def update_document(writer: AsyncWriter, doc: Document):
type=doc.document_type.name if doc.document_type else None,
type_id=doc.document_type.id if doc.document_type else None,
has_type=doc.document_type is not None,
+ warehouse=doc.warehouse.name if doc.warehouse else None,
+ warehouse_id=doc.warehouse.id if doc.warehouse else None,
+ has_warehouse=doc.warehouse is not None,
created=doc.created,
added=doc.added,
asn=asn,
@@ -197,6 +203,7 @@ def remove_document_from_index(document: Document):
class DelayedQuery:
param_map = {
"correspondent": ("correspondent", ["id", "id__in", "id__none", "isnull"]),
+ "warehouse": ("warehouse", ["id", "id__in", "id__none", "isnull"]),
"document_type": ("type", ["id", "id__in", "id__none", "isnull"]),
"storage_path": ("path", ["id", "id__in", "id__none", "isnull"]),
"owner": ("owner", ["id", "id__in", "id__none", "isnull"]),
diff --git a/src/documents/matching.py b/src/documents/matching.py
index 6ffa1b3aa..5c5629003 100644
--- a/src/documents/matching.py
+++ b/src/documents/matching.py
@@ -7,6 +7,7 @@ from documents.classifier import DocumentClassifier
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentSource
from documents.models import Correspondent
+from documents.models import Warehouse
from documents.models import Document
from documents.models import DocumentType
from documents.models import MatchingModel
@@ -56,6 +57,28 @@ def match_correspondents(document: Document, classifier: DocumentClassifier, use
),
)
+def match_warehouses(document: Document, classifier: DocumentClassifier, user=None):
+ pred_id = classifier.predict_warehouse(document.content) if classifier else None
+
+ if user is None and document.owner is not None:
+ user = document.owner
+
+ if user is not None:
+ warehouses = get_objects_for_user_owner_aware(
+ user,
+ "documents.view_warehouse",
+ Warehouse,
+ )
+ else:
+ warehouses = Warehouse.objects.all()
+
+ return list(
+ filter(
+ lambda o: matches(o, document)
+ or (o.pk == pred_id and o.matching_algorithm == MatchingModel.MATCH_AUTO),
+ warehouses,
+ ),
+ )
def match_document_types(document: Document, classifier: DocumentClassifier, user=None):
pred_id = classifier.predict_document_type(document.content) if classifier else None
@@ -356,6 +379,16 @@ def existing_document_matches_workflow(
f"Document correspondent {document.correspondent} does not match {trigger.filter_has_correspondent}",
)
trigger_matched = False
+
+ # Document warehouse vs trigger has_warehouse
+ if (
+ trigger.filter_has_warehouse is not None
+ and document.warehouse != trigger.filter_has_warehouse
+ ):
+ reason = (
+ f"Document warehouse {document.warehouse} does not match {trigger.filter_has_warehouse}",
+ )
+ trigger_matched = False
# Document document_type vs trigger has_document_type
if (
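
Together with the filterset and index-schema changes above, `match_warehouses` makes warehouses behave like the other matched objects: rule matches plus the classifier's auto prediction, restricted to warehouses the user may view. On the API side the usual query parameters become available; a rough request sketch with placeholder host, token and ids:

```python
# Rough sketch of filtering by warehouse through the documents endpoint.
import requests

headers = {"Authorization": "Token <api-token>"}
url = "http://localhost:8000/api/documents/"

# plain filtering via DocumentFilterSet
requests.get(url, params={"warehouse__id__in": "3,5"}, headers=headers)
# full-text search narrowed by warehouse via DelayedQuery.param_map
requests.get(url, params={"query": "invoice", "warehouse__isnull": "false"}, headers=headers)
```
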
diff --git a/src/documents/migrations/1052_warehouse_path.py b/src/documents/migrations/1052_warehouse_path.py
new file mode 100644
index 000000000..d61098a2e
--- /dev/null
+++ b/src/documents/migrations/1052_warehouse_path.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.11 on 2024-05-30 07:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('documents', '1051_alter_warehouse_options_warehouse_is_insensitive_and_more'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='warehouse',
+ name='path',
+ field=models.TextField(blank=True, null=True, verbose_name='path'),
+ ),
+ ]
diff --git a/src/documents/migrations/1053_remove_document_warehouses_document_warehouse.py b/src/documents/migrations/1053_remove_document_warehouses_document_warehouse.py
new file mode 100644
index 000000000..5ebbd407f
--- /dev/null
+++ b/src/documents/migrations/1053_remove_document_warehouses_document_warehouse.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.11 on 2024-05-30 12:43
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('documents', '1052_warehouse_path'),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name='document',
+ name='warehouses',
+ ),
+ migrations.AddField(
+ model_name='document',
+ name='warehouse',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='documents', to='documents.warehouse', verbose_name='warehouse'),
+ ),
+ ]
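
As written, migration 1053 removes the old `warehouses` column before adding `warehouse`, so any existing document-to-warehouse assignments are dropped on upgrade. If they should survive the rename, a data-migration step along these lines would be needed (hypothetical, not part of the patch), with the `AddField` moved before the copy and the `RemoveField` after it:

```python
# Hypothetical preservation step: add the new column, copy the FK ids, drop the old one.
from django.db import migrations
from django.db.models import F


def copy_warehouse_fk(apps, schema_editor):
    Document = apps.get_model("documents", "Document")
    Document.objects.exclude(warehouses=None).update(warehouse_id=F("warehouses_id"))


class Migration(migrations.Migration):
    dependencies = [("documents", "1052_warehouse_path")]

    operations = [
        # migrations.AddField(model_name="document", name="warehouse", ...),
        migrations.RunPython(copy_warehouse_fk, migrations.RunPython.noop),
        # migrations.RemoveField(model_name="document", name="warehouses"),
    ]
```
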
diff --git a/src/documents/models.py b/src/documents/models.py
index 515557947..c241acf52 100644
--- a/src/documents/models.py
+++ b/src/documents/models.py
@@ -144,6 +144,7 @@ class Warehouse(MatchingModel):
choices=TYPE_WAREHOUSE,
default=WAREHOUSE,)
parent_warehouse = models.ForeignKey('self', on_delete=models.CASCADE, null=True, blank=True )
+ path = models.TextField(_("path"), null=True, blank=True)
class Meta(MatchingModel.Meta):
verbose_name = _("warehouse")
@@ -177,6 +178,15 @@ class Document(ModelWithOwner):
on_delete=models.SET_NULL,
verbose_name=_("storage path"),
)
+
+ warehouse = models.ForeignKey(
+ Warehouse,
+ blank=True,
+ null=True,
+ related_name="documents",
+ on_delete=models.SET_NULL,
+ verbose_name=_("warehouse"),
+ )
title = models.CharField(_("title"), max_length=128, blank=True, db_index=True)
@@ -207,15 +217,6 @@ class Document(ModelWithOwner):
verbose_name=_("tags"),
)
- warehouses = models.ForeignKey(
- Warehouse,
- blank=True,
- null=True,
- related_name="documents",
- on_delete=models.SET_NULL,
- verbose_name=_("warehouses"),
- )
-
checksum = models.CharField(
_("checksum"),
max_length=32,
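
The model change turns the misnamed `warehouses` FK into a single nullable `warehouse` FK with `SET_NULL`, alongside the existing three-level hierarchy (`WAREHOUSE` → `SHELF` → `BOXCASE`, the constants used in the views). A small sketch of wiring a hierarchy and attaching a document; names and the document are placeholders:

```python
# Sketch only: hierarchy fields and the renamed document FK.
from documents.models import Document, Warehouse

main = Warehouse.objects.create(name="Main", type=Warehouse.WAREHOUSE)
shelf = Warehouse.objects.create(name="Shelf A", type=Warehouse.SHELF, parent_warehouse=main)
box = Warehouse.objects.create(name="Box 1", type=Warehouse.BOXCASE, parent_warehouse=shelf)

doc = Document.objects.first()  # assumes at least one document exists
doc.warehouse = box
doc.save(update_fields=("warehouse",))
print(box.documents.count())  # reverse accessor via related_name="documents"
```
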
diff --git a/src/documents/serialisers.py b/src/documents/serialisers.py
index 9115abde9..8708a5efa 100644
--- a/src/documents/serialisers.py
+++ b/src/documents/serialisers.py
@@ -428,7 +428,7 @@ class TagsField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return Tag.objects.all()
-class WarehousesField(serializers.PrimaryKeyRelatedField):
+class WarehouseField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return Warehouse.objects.all()
@@ -656,7 +656,7 @@ class DocumentSerializer(
):
correspondent = CorrespondentField(allow_null=True)
tags = TagsField(many=True)
- warehouses = WarehousesField(allow_null=True)
+ warehouse = WarehouseField(allow_null=True)
document_type = DocumentTypeField(allow_null=True)
storage_path = StoragePathField(allow_null=True)
@@ -775,10 +775,10 @@ class DocumentSerializer(
"correspondent",
"document_type",
"storage_path",
+ "warehouse",
"title",
"content",
"tags",
- "warehouses",
"created",
"created_date",
"modified",
@@ -882,6 +882,7 @@ class BulkEditSerializer(
"set_correspondent",
"set_document_type",
"set_storage_path",
+            "set_warehouse",
"add_tag",
"remove_tag",
"modify_tags",
@@ -916,6 +917,8 @@ class BulkEditSerializer(
return bulk_edit.set_document_type
elif method == "set_storage_path":
return bulk_edit.set_storage_path
+ elif method == "set_warehouse":
+ return bulk_edit.set_warehouse
elif method == "add_tag":
return bulk_edit.add_tag
elif method == "remove_tag":
@@ -971,6 +974,17 @@ class BulkEditSerializer(
raise serializers.ValidationError("Correspondent does not exist")
else:
raise serializers.ValidationError("correspondent not specified")
+ def _validate_parameters_warehouse(self, parameters):
+ if "warehouse" in parameters:
+ warehouse_id = parameters["warehouse"]
+ if warehouse_id is None:
+ return
+ try:
+ Warehouse.objects.get(id=warehouse_id)
+ except Warehouse.DoesNotExist:
+ raise serializers.ValidationError("Warehouse does not exist")
+ else:
+ raise serializers.ValidationError("warehouse not specified")
def _validate_storage_path(self, parameters):
if "storage_path" in parameters:
@@ -1059,6 +1073,8 @@ class BulkEditSerializer(
self._validate_parameters_modify_tags(parameters)
elif method == bulk_edit.set_storage_path:
self._validate_storage_path(parameters)
+ elif method == bulk_edit.set_warehouse:
+ self._validate_parameters_warehouse(parameters)
elif method == bulk_edit.set_permissions:
self._validate_parameters_set_permissions(parameters)
elif method == bulk_edit.rotate:
@@ -1107,6 +1123,14 @@ class PostDocumentSerializer(serializers.Serializer):
write_only=True,
required=False,
)
+
+ warehouse = serializers.PrimaryKeyRelatedField(
+ queryset=Warehouse.objects.all(),
+ label="Warehouse",
+ allow_null=True,
+ write_only=True,
+ required=False,
+ )
storage_path = serializers.PrimaryKeyRelatedField(
queryset=StoragePath.objects.all(),
@@ -1168,6 +1192,12 @@ class PostDocumentSerializer(serializers.Serializer):
return storage_path.id
else:
return None
+
+ def validate_warehouse(self, warehouse):
+ if warehouse:
+ return warehouse.id
+ else:
+ return None
def validate_tags(self, tags):
if tags:
@@ -1232,6 +1262,7 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
title="title",
correspondent="correspondent",
document_type="document_type",
+ warehouse="warehouse",
created="created",
created_year="created_year",
created_year_short="created_year_short",
@@ -1504,6 +1535,7 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
"filter_has_tags",
"filter_has_correspondent",
"filter_has_document_type",
+ "filter_has_warehouse",
]
def validate(self, attrs):
@@ -1540,6 +1572,8 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
assign_tags = TagsField(many=True, allow_null=True, required=False)
assign_document_type = DocumentTypeField(allow_null=True, required=False)
assign_storage_path = StoragePathField(allow_null=True, required=False)
+    assign_warehouse = WarehouseField(allow_null=True, required=False)
+
class Meta:
model = WorkflowAction
@@ -1551,6 +1585,7 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
"assign_correspondent",
"assign_document_type",
"assign_storage_path",
+            "assign_warehouse",
"assign_owner",
"assign_view_users",
"assign_view_groups",
@@ -1565,6 +1600,8 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
"remove_document_types",
"remove_all_storage_paths",
"remove_storage_paths",
+ "remove_all_warehouses",
+ "remove_warehouses",
"remove_custom_fields",
"remove_all_custom_fields",
"remove_all_owners",
@@ -1658,6 +1695,7 @@ class WorkflowSerializer(serializers.ModelSerializer):
remove_correspondents = action.pop("remove_correspondents", None)
remove_document_types = action.pop("remove_document_types", None)
remove_storage_paths = action.pop("remove_storage_paths", None)
+ remove_warehouses = action.pop("remove_warehouses", None)
remove_custom_fields = action.pop("remove_custom_fields", None)
remove_owners = action.pop("remove_owners", None)
remove_view_users = action.pop("remove_view_users", None)
@@ -1690,6 +1728,8 @@ class WorkflowSerializer(serializers.ModelSerializer):
action_instance.remove_document_types.set(remove_document_types)
if remove_storage_paths is not None:
action_instance.remove_storage_paths.set(remove_storage_paths)
+ if remove_warehouses is not None:
+ action_instance.remove_warehouses.set(remove_warehouses)
if remove_custom_fields is not None:
action_instance.remove_custom_fields.set(remove_custom_fields)
if remove_owners is not None:
@@ -1756,22 +1796,12 @@ class WorkflowSerializer(serializers.ModelSerializer):
class WarehouseSerializer(MatchingModelSerializer, OwnedObjectSerializer):
- document_count = serializers.SerializerMethodField()
- def get_document_count(self,obj):
- document = Document.objects.filter(warehouses=obj).count()
- return document
class Meta:
model = Warehouse
fields = '__all__'
- def to_representation(self, instance):
- data = super().to_representation(instance)
- if instance.parent_warehouse:
- data['parent_warehouse'] = WarehouseSerializer(instance.parent_warehouse).data
- else:
- data['parent_warehouse'] = None
- return data
+
\ No newline at end of file
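
With `set_warehouse` registered as a bulk-edit method and validated by `_validate_parameters_warehouse`, the existing bulk-edit endpoint can set or clear the field. Illustrative payloads; host, token and ids are placeholders:

```python
# Illustrative bulk-edit calls for the new method.
import requests

headers = {"Authorization": "Token <api-token>"}
url = "http://localhost:8000/api/documents/bulk_edit/"

# assign warehouse 3 to two documents
requests.post(
    url,
    json={"documents": [10, 11], "method": "set_warehouse", "parameters": {"warehouse": 3}},
    headers=headers,
)
# clear the field again
requests.post(
    url,
    json={"documents": [10, 11], "method": "set_warehouse", "parameters": {"warehouse": None}},
    headers=headers,
)
```
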
diff --git a/src/documents/signals/handlers.py b/src/documents/signals/handlers.py
index cdfedcb4c..1d4596392 100644
--- a/src/documents/signals/handlers.py
+++ b/src/documents/signals/handlers.py
@@ -130,6 +130,59 @@ def set_correspondent(
document.correspondent = selected
document.save(update_fields=("correspondent",))
+def set_warehouse(
+ sender,
+ document: Document,
+ logging_group=None,
+ classifier: Optional[DocumentClassifier] = None,
+ replace=False,
+ use_first=True,
+ suggest=False,
+ base_url=None,
+ stdout=None,
+ style_func=None,
+ **kwargs,
+):
+ if document.warehouse and not replace:
+ return
+
+ potential_warehouses = matching.match_warehouses(document, classifier)
+
+ potential_count = len(potential_warehouses)
+ selected = potential_warehouses[0] if potential_warehouses else None
+ if potential_count > 1:
+ if use_first:
+ logger.debug(
+ f"Detected {potential_count} potential warehouses, "
+ f"so we've opted for {selected}",
+ extra={"group": logging_group},
+ )
+ else:
+ logger.debug(
+ f"Detected {potential_count} potential warehouses, "
+ f"not assigning any warehouse",
+ extra={"group": logging_group},
+ )
+ return
+
+ if selected or replace:
+ if suggest:
+ _suggestion_printer(
+ stdout,
+ style_func,
+ "warehouse",
+ document,
+ selected,
+ base_url,
+ )
+ else:
+ logger.info(
+ f"Assigning warehouse {selected} to {document}",
+ extra={"group": logging_group},
+ )
+
+ document.warehouse = selected
+ document.save(update_fields=("warehouse",))
def set_document_type(
sender,
@@ -545,6 +598,7 @@ def run_workflow(
.prefetch_related("actions__assign_custom_fields")
.prefetch_related("actions__remove_tags")
.prefetch_related("actions__remove_correspondents")
+ .prefetch_related("actions__remove_warehouses")
.prefetch_related("actions__remove_document_types")
.prefetch_related("actions__remove_storage_paths")
.prefetch_related("actions__remove_custom_fields")
@@ -570,6 +624,9 @@ def run_workflow(
if action.assign_correspondent is not None:
document.correspondent = action.assign_correspondent
+
+ if action.assign_warehouse is not None:
+ document.warehouse = action.assign_warehouse
if action.assign_document_type is not None:
document.document_type = action.assign_document_type
@@ -594,6 +651,11 @@ def run_workflow(
if document.document_type is not None
else ""
),
+ (
+ document.warehouse.name
+ if document.warehouse is not None
+ else ""
+ ),
(
document.owner.username
if document.owner is not None
@@ -692,6 +754,16 @@ def run_workflow(
)
):
document.correspondent = None
+
+ if action.remove_all_warehouses or (
+ document.warehouse
+ and (
+ action.remove_warehouses.filter(
+ pk=document.warehouse.pk,
+ ).exists()
+ )
+ ):
+ document.warehouse = None
if action.remove_all_document_types or (
document.document_type
diff --git a/src/documents/tasks.py b/src/documents/tasks.py
index 0ab55ac45..c60832490 100644
--- a/src/documents/tasks.py
+++ b/src/documents/tasks.py
@@ -33,6 +33,7 @@ from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
 from documents.models import Tag
+from documents.models import Warehouse
from documents.parsers import DocumentParser
from documents.parsers import get_parser_class_for_mime_type
@@ -73,6 +74,7 @@ def train_classifier():
not Tag.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
and not DocumentType.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
and not Correspondent.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
+ and not Warehouse.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
and not StoragePath.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
):
logger.info("No automatic matching items, not training")
@@ -170,6 +172,7 @@ def consume_file(
override_correspondent_id=overrides.correspondent_id,
override_document_type_id=overrides.document_type_id,
override_tag_ids=overrides.tag_ids,
+ override_warehouse_id=overrides.warehouse_id,
override_storage_path_id=overrides.storage_path_id,
override_created=overrides.created,
override_asn=overrides.asn,
diff --git a/src/documents/views.py b/src/documents/views.py
index 72272d78c..0a7d5e094 100644
--- a/src/documents/views.py
+++ b/src/documents/views.py
@@ -104,6 +104,7 @@ from documents.filters import WarehouseFilterSet
from documents.matching import match_correspondents
from documents.matching import match_document_types
from documents.matching import match_storage_paths
 from documents.matching import match_tags
+from documents.matching import match_warehouses
from documents.models import Correspondent
from documents.models import CustomField
@@ -336,7 +337,7 @@ class DocumentViewSet(
ObjectOwnedOrGrantedPermissionsFilter,
)
filterset_class = DocumentFilterSet
- search_fields = ("title", "correspondent__name", "content", "warehouses")
+    search_fields = ("title", "correspondent__name", "content", "warehouse__name")
ordering_fields = (
"id",
"title",
@@ -354,7 +355,7 @@ class DocumentViewSet(
return (
Document.objects.distinct()
.annotate(num_notes=Count("notes"))
- .select_related("correspondent", "storage_path", "document_type", "owner")
+            .select_related("correspondent", "storage_path", "document_type", "warehouse", "owner")
.prefetch_related("tags", "custom_fields", "notes")
)
@@ -529,6 +530,9 @@ class DocumentViewSet(
"correspondents": [
c.id for c in match_correspondents(doc, classifier, request.user)
],
+ "warehouses": [
+ wh.id for wh in match_warehouses(doc, classifier, request.user)
+ ],
"tags": [t.id for t in match_tags(doc, classifier, request.user)],
"document_types": [
dt.id for dt in match_document_types(doc, classifier, request.user)
@@ -749,6 +753,7 @@ class SearchResultSerializer(DocumentSerializer, PassUserMixin):
"correspondent",
"storage_path",
"document_type",
+                "warehouse",
"owner",
)
.prefetch_related("tags", "custom_fields", "notes")
@@ -937,6 +942,7 @@ class PostDocumentView(GenericAPIView):
correspondent_id = serializer.validated_data.get("correspondent")
document_type_id = serializer.validated_data.get("document_type")
storage_path_id = serializer.validated_data.get("storage_path")
+ warehouse_id = serializer.validated_data.get("warehouse")
tag_ids = serializer.validated_data.get("tags")
title = serializer.validated_data.get("title")
created = serializer.validated_data.get("created")
@@ -965,6 +971,7 @@ class PostDocumentView(GenericAPIView):
correspondent_id=correspondent_id,
document_type_id=document_type_id,
storage_path_id=storage_path_id,
+ warehouse_id=warehouse_id,
tag_ids=tag_ids,
created=created,
asn=archive_serial_number,
@@ -1014,6 +1021,12 @@ class SelectionDataView(GenericAPIView):
Case(When(documents__id__in=ids, then=1), output_field=IntegerField()),
),
)
+
+ warehouses = Warehouse.objects.annotate(
+ document_count=Count(
+ Case(When(documents__id__in=ids, then=1), output_field=IntegerField()),
+ ),
+ )
r = Response(
{
@@ -1027,6 +1040,9 @@ class SelectionDataView(GenericAPIView):
"selected_document_types": [
{"id": t.id, "document_count": t.document_count} for t in types
],
+ "selected_warehouses": [
+ {"id": t.id, "document_count": t.document_count} for t in warehouses
+ ],
"selected_storage_paths": [
{"id": t.id, "document_count": t.document_count}
for t in storage_paths
@@ -1111,6 +1127,17 @@ class StatisticsView(APIView):
),
)
)
+ warehouse_count = (
+ Warehouse.objects.count()
+ if user is None
+ else len(
+ get_objects_for_user_owner_aware(
+ user,
+ "documents.view_warehouse",
+ Warehouse,
+ ),
+ )
+ )
storage_path_count = (
StoragePath.objects.count()
if user is None
@@ -1160,6 +1187,7 @@ class StatisticsView(APIView):
"correspondent_count": correspondent_count,
"document_type_count": document_type_count,
"storage_path_count": storage_path_count,
+ "warehouse_count": warehouse_count,
},
)
@@ -1531,18 +1559,18 @@ class BulkEditObjectsView(PassUserMixin):
if warehouse.type == Warehouse.SHELF:
boxcases = Warehouse.objects.filter(parent_warehouse=warehouse)
- documents = Document.objects.filter(warehouses__in=[b.id for b in boxcases])
+ documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
documents.delete()
boxcases.delete()
warehouse.delete()
if warehouse.type == Warehouse.BOXCASE:
- documents = Document.objects.filter(warehouses=warehouse)
+ documents = Document.objects.filter(warehouse=warehouse)
documents.delete()
warehouse.delete()
if warehouse.type == Warehouse.WAREHOUSE:
shelves = Warehouse.objects.filter(parent_warehouse=warehouse)
boxcases = Warehouse.objects.filter(parent_warehouse__in=[s.id for s in shelves])
- documents = Document.objects.filter(warehouses__in=[b.id for b in boxcases])
+ documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
documents.delete()
boxcases.delete()
shelves.delete()
@@ -1718,6 +1746,9 @@ class SystemStatusView(PassUserMixin):
or Correspondent.objects.filter(
matching_algorithm=Tag.MATCH_AUTO,
).exists()
+ or Warehouse.objects.filter(
+ matching_algorithm=Tag.MATCH_AUTO,
+ ).exists()
or StoragePath.objects.filter(
matching_algorithm=Tag.MATCH_AUTO,
).exists()
@@ -1812,31 +1843,10 @@ class WarehouseViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
def create(self, request, *args, **kwargs):
# try:
serializer = WarehouseSerializer(data=request.data)
- name = None
- type = None
parent_warehouse = None
if serializer.is_valid(raise_exception=True):
- name = serializer.validated_data.get("name", "")
- type = serializer.validated_data.get("type", Warehouse.WAREHOUSE)
parent_warehouse = serializer.validated_data.get('parent_warehouse',None)
- # check_warehouse = Warehouse.objects.filter(
- # name = name,
- # type = type,
- # parent_warehouse=parent_warehouse
- # )
-
- # if check_warehouse:
- # return Response({'status':400,
- # 'message':'created fail'},status=status.HTTP_400_BAD_REQUEST)
-
- # if type == Warehouse.SHELF and parent_warehouse == None:
- # return Response({'status': 400,
- # 'message': 'parent_warehouse is required for Shelf type'}, status=status.HTTP_400_BAD_REQUEST)
- # elif type == Warehouse.BOXCASE and parent_warehouse == None:
- # return Response({'status': 400,
- # 'message': 'parent_warehouse is required for Boxcase type'}, status=status.HTTP_400_BAD_REQUEST)
-
- # if serializer.is_valid(raise_exception=True):
+
parent_warehouse = Warehouse.objects.filter(id=parent_warehouse.id if parent_warehouse else 0).first()
if serializer.validated_data.get("type") == Warehouse.WAREHOUSE and not parent_warehouse:
@@ -1853,7 +1863,27 @@ class WarehouseViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
'message':'created successfully',
'data':serializer.data},status=status.HTTP_201_CREATED)
- # except Exception as e:
- # return Response({'status':400,
- # 'message':e},status=status.HTTP_400_BAD_REQUEST)
+
+ def destroy(self, request, pk, *args, **kwargs):
+ warehouse = Warehouse.objects.get(id=pk)
+ if warehouse.type == Warehouse.SHELF:
+ boxcases = Warehouse.objects.filter(parent_warehouse=warehouse)
+ documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
+ documents.delete()
+ boxcases.delete()
+ warehouse.delete()
+ if warehouse.type == Warehouse.BOXCASE:
+ documents = Document.objects.filter(warehouse=warehouse)
+ documents.delete()
+ warehouse.delete()
+ if warehouse.type == Warehouse.WAREHOUSE:
+ shelves = Warehouse.objects.filter(parent_warehouse=warehouse)
+ boxcases = Warehouse.objects.filter(parent_warehouse__in=[s.id for s in shelves])
+ documents = Document.objects.filter(warehouse__in=[b.id for b in boxcases])
+ documents.delete()
+ boxcases.delete()
+ shelves.delete()
+ warehouse.delete()
+
+ return Response(status=status.HTTP_204_NO_CONTENT)
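
`SelectionDataView` now reports `selected_warehouses` and `StatisticsView` a `warehouse_count`, and suggestions include matched warehouse ids. A quick check of the new response fields (sketch; host, token and ids are placeholders):

```python
# Quick check of the new response fields.
import requests

headers = {"Authorization": "Token <api-token>"}
base = "http://localhost:8000/api"

sel = requests.post(
    f"{base}/documents/selection_data/",
    json={"documents": [10, 11]},
    headers=headers,
)
print(sel.json()["selected_warehouses"])

stats = requests.get(f"{base}/statistics/", headers=headers)
print(stats.json()["warehouse_count"])
```
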