Compare commits
302 Commits
feature-ei ... chore-ymlf
f8c2a0b9e6, 5468bf28c4, 7a07f1e81d, 92524ae97a, 1c89f6da24, d1a3e3b859, 79ae594d54, 97fe5c4176,
e60bd3a132, 5d6cfa7349, 3105317137, 1456169d7f, 22a6fe5e10, a0c1a19263, 1f5d1b6f26, 3b19a727b8,
7146a5f4fc, 6babc61ba2, db5e54c6e5, 4ef5fbfb6e, b0390a92ea, 70898a5064, d07d425618, 1f60f4a636,
b29395cde7, 2f70d58219, 9944f81512, f4413e0a08, 169aa8c8bd, 94556a2607, dcd50d5359, 376823598e,
54bcbfa546, 5421e54cb0, 032bada221, 670ee6c5b0, 309fb199f2, 79328b1cec, 5570d20625, ba2cb1dec8,
4d15544a3e, 955ff32dcd, b746b6f2d6, b4b0f802e1, 5f16d5f5f1, 8db04398c7, 485dad01b7, cf48f47a8c,
f2667f5afa, d73118d226, dd9e9a8c56, 76d363f22d, aaaa6c1393, bed82215a0, f8aaa5cb32, 1e489a0666,
edc7181843, 89e5c08a1f, 0faa9e8865, f205c4d0e2, 344b2bc0eb, 817aad7c8b, d82555e644, f3e6ed56b9,
780d1c67e9, 2b72397a4d, 6c13ffaa01, eb8e124971, 1bc77546eb, 5a453653e2, 16f17829b6, 3cf1c04a83,
bc90ccc555, 90a332a02c, 0098d1bdd5, f6fef18a73, 6563ec6770, 755cf8619f, c6d389100c, 20c4b65273,
86c94c7508, 798ece411e, 654c9ca273, 628d85080f, 865e9fe233, 0eb765c3e8, ddeb741a85, b9bcff22f8,
2d52226732, ec34197b59, edc0e6f859, 61cb5103ed, d364436817, 827fcba277, 3104417076, 047f7c3619,
a548c32c1f, ea911e73c6, 6b7fb286f7, b40479632b, c122c60d3f, 4f08b5fa20, 3bf64ae7da, 822c2d2d56,
98e0a934ac, ceffcd6360, 37442ff829, de5f66b3a0, e37096f66f, e49ecd4dfe, 4718df271f, 17bb3ebbf5,
5e00c1c676, a9ef7ff58e, 518091f856, feb30f36df, bbad36717f, 329ef7aef3, 2b2115e5f0, ba5705a54f,
ea94626b82, fdfea68576, 4e61c2b2e6, 7c959754a0, fed16974dd, ea3303aa76, 1dc80f04cb, c316ae369b,
d94b284815, 63bb3644f6, 880f08599a, 71472a6a82, 7f36163c3b, e560fa3be0, b274665e21, a499905605,
b8bdc10f25, e08606af6e, 52ab07c673, 046d8456e2, 3314c59828, 2103a499eb, 7ab779e78a, 49390c9427,
6f29d64325, 065724befb, e877beea4e, 3e848e6e0f, 2e5656e1ce, 438650bf17, 7df0b621a5, 4cd755f641,
8597911d85, befb80bddf, 7af6983cab, 16d6bb7334, 49b658a944, e1d8680698, ee72e2d1fd, e0ea4a4625,
c2a9ac332a, bf368aadd0, 54e72d5b60, cf7422346a, 5b8c9ef5fc, f56974f158, 427508edf1, 311b259cff,
fce7b03324, 79956d6a7b, 978b072bff, 9c6f695dbf, 0100fcbb23, 1745da0d60, e4e906ce2b, 80c7b97fec,
270e70a958, 082bf6fb8e, 38296d9426, 8311313e6e, ac292999ef, f3cda54cd1, 8f9a294529, 702de0cac3,
ca42762841, 13cfd6f904, 18c4e6029f, 6c34e37838, 2c28348b56, 79e541244e, 74afad5976, c694c9791b,
11ceb8bde5, 20ec8cb57b, bfc11a545b, 4866af31cb, 0ea4da03a7, 41bcc12cc2, 475c231c6f, e00dd46b22,
fd425aa618, e1dde85c59, 01207a284d, 0f863ab378, 258064b339, 2bcb37f3e9, 81f8c64b2c, 3b80112521,
459feea31e, eea5839390, c79414ebd9, ed1775e689, cd50f20a20, c8ec70c05f, 5e3ee3a80d, 29726c3ce1,
6804c92861, a32077566b, 283bcb4c91, f68ee628d9, bd5ba97ee8, 325594034e, 28261ac51c, 23ef52f405,
2b45793bc2, 2f96cc0050, d36e8254f3, 405fab8514, ee4f62a1b3, d97e4a9a95, 1cfba87114, b145ed315a,
1f47b8c090, 1e3f2a1438, d61b2bbfc6, e1d6b4a9ac, 9f398337c6, 765bf1d11c, 49a96ccee0, c45bbcaea2,
ab87aedfc7, 18e3ad8b22, c342eafa8d, 290c44f5d5, 02015ec404, 98b4f447d8, 272691e386, 1012cee39a,
19a5733d0d, 86788f1445, 1d5e7e930e, da6d568906, abf05d5ebe, 5821033e3d, 5c1039d07d, b874d6d1a4,
68a47cbf0b, ccc8917706, 4c4d3a45c2, 6007b252d7, 3f05b9f921, 67ec1aea42, f5a0b9c174, 7035445d6a,
a899ff16e3, d77c9e3bd5, 0ab21b6fc5, 485237caf1, 9181aebc8c, eec3f13610, 935d077836, d06aac947d,
1856837d21, bac6a013ed, aef68f0b41, 7fbe4d0aad, 2a1c6bf0ca, 8e6de2790e, 94a20b4510, 21558dcf8b,
85d913520b, f89b6281da, 75de53eb83, b0dd77bfd8, 51b0f6e325, 9153be489c, 90731e05f5, 9270f7290e,
4e3d25c714, e44cfef662, 6c138a21d4, b5a1dc86a5, 3520a83c2f, 93f2ef45f5, 8291ec17d4, 5007855904,
754cf17c90, 6972d0337f, 8e3a021b0f, 1f85da64e4, fbeaed930e, fcf532f13e
.codecov.yml (16)
@@ -1,18 +1,18 @@
codecov:
require_ci_to_pass: true
# https://docs.codecov.com/docs/flags#recommended-automatic-flag-management
# Require each flag to have 1 upload before notification
flag_management:
individual_flags:
- name: backend
# https://docs.codecov.com/docs/components
component_management:
individual_components:
- component_id: backend
paths:
- src/
- name: frontend
- src/**
- component_id: frontend
paths:
- src-ui/
- src-ui/**
# https://docs.codecov.com/docs/pull-request-comments
# codecov will only comment if coverage changes
comment:
layout: "header, diff, components, flags, files"
require_changes: true
# https://docs.codecov.com/docs/javascript-bundle-analysis
require_bundle_changes: true
@@ -76,18 +1,15 @@ RUN set -eux \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES}

ARG PYTHON_PACKAGES="\
python3 \
python3-pip \
python3-wheel \
pipenv \
ca-certificates"
ARG PYTHON_PACKAGES="ca-certificates"

RUN set -eux \
echo "Installing python packages" \
&& apt-get update \
&& apt-get install --yes --quiet ${PYTHON_PACKAGES}

COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

RUN set -eux \
&& echo "Installing pre-built updates" \
&& echo "Installing qpdf ${QPDF_VERSION}" \
@@ -123,13 +120,15 @@ RUN set -eux \
WORKDIR /usr/src/paperless/src/docker/

COPY [ \
"docker/imagemagick-policy.xml", \
"docker/rootfs/etc/ImageMagick-6/paperless-policy.xml", \
"./" \
]

RUN set -eux \
&& echo "Configuring ImageMagick" \
&& mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
&& mv paperless-policy.xml /etc/ImageMagick-6/policy.xml

COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

# Packages needed only for building a few quick Python
# dependencies
@@ -140,18 +139,17 @@ ARG BUILD_PACKAGES="\
libpq-dev \
# https://github.com/PyMySQL/mysqlclient#linux
default-libmysqlclient-dev \
pkg-config \
pre-commit"
pkg-config"

# hadolint ignore=DL3042
RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
RUN --mount=type=cache,target=/root/.cache/uv,id=pip-cache \
set -eux \
&& echo "Installing build system packages" \
&& apt-get update \
&& apt-get install --yes --quiet ${BUILD_PACKAGES}

RUN set -eux \
&& npm update npm -g
&& npm update -g pnpm

# add users, setup scripts
# Mount the compiled frontend to expected location
@@ -169,9 +167,6 @@ RUN set -eux \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/.venv \
&& echo "Adjusting all permissions" \
&& chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless
# && echo "Collecting static files" \
# && gosu paperless python3 manage.py collectstatic --clear --no-input --link \
# && gosu paperless python3 manage.py compilemessages

VOLUME ["/usr/src/paperless/paperless-ngx/data", \
"/usr/src/paperless/paperless-ngx/media", \
@@ -1,8 +1,8 @@
# Paperless NGX Development Environment
# Paperless-ngx Development Environment

## Overview

Welcome to the Paperless NGX development environment! This setup uses VSCode DevContainers to provide a consistent and seamless development experience.
Welcome to the Paperless-ngx development environment! This setup uses VSCode DevContainers to provide a consistent and seamless development experience.

### What are DevContainers?

@@ -17,7 +17,7 @@ DevContainers are a feature in VSCode that allows you to develop within a Docker

## DevContainer Setup

The DevContainer configuration provides up all the necessary services for Paperless NGX, including:
The DevContainer configuration provides up all the necessary services for Paperless-ngx, including:

- Redis
- Gotenberg
@@ -36,7 +36,7 @@ The setup includes debugging configurations (`launch.json`) and tasks (`tasks.js
- **Maintenance Tasks:**
- Create superuser
- Run migrations
- Recreate virtual environment (`.venv` with pipenv)
- Recreate virtual environment (`.venv` with `uv`)
- Compile frontend assets

## Getting Started
@@ -85,7 +85,7 @@ Once the DevContainer is up and running, perform the following steps:

You can start and debug backend services either as debugging sessions via `launch.json` or as tasks.

#### Using `launch.json`:
#### Using `launch.json`

1. Press `F5` or go to the **Run and Debug** view in VSCode.
2. Select the desired configuration:
@@ -93,7 +93,7 @@ You can start and debug backend services either as debugging sessions via `launc
- `Document Consumer`
- `Celery`

#### Using Tasks:
#### Using Tasks

1. Open the command palette:
- **Windows/Linux**: `Ctrl+Shift+P`
@@ -108,7 +108,7 @@ You can start and debug backend services either as debugging sessions via `launc

Additional tasks are available for common maintenance operations:

- **Recreate .venv**: For setting up the virtual environment using pipenv.
- **Recreate .venv**: For setting up the virtual environment using `uv`.
- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.
@@ -3,14 +3,26 @@
"dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
"service": "paperless-development",
"workspaceFolder": "/usr/src/paperless/paperless-ngx",
"postCreateCommand": "/bin/bash -c pre-commit install && pipenv install --dev",
"postCreateCommand": "/bin/bash -c 'uv sync --group dev && uv run pre-commit install'",
"customizations": {
"vscode": {
"extensions": [
"mhutchie.git-graph",
"ms-python.python"
]
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
}
},
"remoteUser": "paperless"
}
},
"remoteUser": "paperless"
}
@@ -21,19 +21,17 @@
# This file is intended only to be used through VSCOde devcontainers. See README.md
# in the folder .devcontainer.

services:
broker:
image: docker.io/library/redis:7
restart: unless-stopped
volumes:
- redisdata:/data
- ./redisdata:/data
# No ports need to be exposed; the VSCode DevContainer plugin manages them.
paperless-development:
image: paperless-ngx
build:
context: ../ # Dockerfile cannot access files from parent directories if context is not set.
context: ../ # Dockerfile cannot access files from parent directories if context is not set.
dockerfile: ./.devcontainer/Dockerfile
restart: unless-stopped
depends_on:
@@ -43,14 +41,16 @@ services:
volumes:
- ..:/usr/src/paperless/paperless-ngx:delegated
- ../.devcontainer/vscode:/usr/src/paperless/paperless-ngx/.vscode:delegated # VSCode config files
- pipenv:/usr/src/paperless/paperless-ngx/.venv # Pipenv environment persisted in volume
- virtualenv:/usr/src/paperless/paperless-ngx/.venv # Virtual environment persisted in volume
- /usr/src/paperless/paperless-ngx/src/documents/static/frontend # Static frontend files exist only in container
- /usr/src/paperless/paperless-ngx/src/.pytest_cache
- /usr/src/paperless/paperless-ngx/.ruff_cache
- /usr/src/paperless/paperless-ngx/htmlcov
- /usr/src/paperless/paperless-ngx/.coverage
- data:/usr/src/paperless/paperless-ngx/data
- media:/usr/src/paperless/paperless-ngx/media
- ./data:/usr/src/paperless/paperless-ngx/data
- ./media:/usr/src/paperless/paperless-ngx/media
- ./consume:/usr/src/paperless/paperless-ngx/consume
- ~/.gitconfig:/usr/src/paperless/.gitconfig:ro
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_TIKA_ENABLED: 1
@@ -58,27 +58,22 @@ services:
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
PAPERLESS_STATICDIR: ./src/documents/static
PAPERLESS_DEBUG: true

# Overrides default command so things don't shut down after the process ends.
command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"

gotenberg:
image: docker.io/gotenberg/gotenberg:7.10
image: docker.io/gotenberg/gotenberg:8.17
restart: unless-stopped

# The Gotenberg Chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even JavaScript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"

tika:
image: docker.io/apache/tika:latest
restart: unless-stopped

volumes:
data:
media:
redisdata:
pipenv:
virtualenv:
@@ -2,42 +2,57 @@
"version": "0.2.0",
"configurations": [
{
"name": "manage.py runserver",
"name": "Chrome: Debug Angular Frontend",
"description": "Debug the Angular Dev Frontend in Chrome",
"type": "chrome",
"request": "launch",
"url": "http://localhost:4200",
"webRoot": "${workspaceFolder}/src-ui",
"preLaunchTask": "Start: Frontend Angular"
},
{
"name": "Debug: Backend Server (manage.py runserver)",
"description": "Debug the Django Backend Server",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/manage.py",
"console": "integratedTerminal",
"justMyCode": true,
"args": ["runserver"],
"django": true
},
{
"name": "manage.py document_consumer",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/manage.py",
"console": "integratedTerminal",
"justMyCode": true,
"args": ["document_consumer"],
"django": true
},
{
"name": "celery",
"type": "python",
"cwd": "${workspaceFolder}/src",
"request": "launch",
"module": "celery",
"args": [
"runserver"
],
"django": true,
"console": "integratedTerminal",
"env": {
"PYTHONPATH": "${workspaceFolder}/src"
},
},
"python": "${workspaceFolder}/.venv/bin/python"
},
{
"name": "Debug: Consumer Service (manage.py document_consumer)",
"description": "Debug the Consumer Service which processes files from a directory",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/manage.py",
"args": [
"-A",
"paperless",
"worker",
"-l",
"DEBUG"
]
"document_consumer"
],
"django": true,
"console": "integratedTerminal",
"env": {
"PYTHONPATH": "${workspaceFolder}/src"
},
"python": "${workspaceFolder}/.venv/bin/python"
}
],
"compounds": [
{
"name": "Debug: FullStack",
"description": "Debug run the Angular dev frontend, Django backend, and consumer service",
"configurations": [
"Chrome: Debug Angular Frontend",
"Debug: Backend Server (manage.py runserver)",
"Debug: Consumer Service (manage.py document_consumer)"
],
"preLaunchTask": "Start: Celery Worker"
}
]
}
@@ -1,29 +1,67 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "manage.py document_consumer",
|
||||
"type": "shell",
|
||||
"command": "pipenv run python manage.py document_consumer",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
|
||||
{
|
||||
"label": "Start: Celery Worker",
|
||||
"description": "Start the Celery Worker which processes background and consume tasks",
|
||||
"type": "shell",
|
||||
"command": "uv run celery --app paperless worker -l DEBUG",
|
||||
"isBackground": true,
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
},
|
||||
"problemMatcher": [
|
||||
{
|
||||
"owner": "custom",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": ".",
|
||||
"file": 1,
|
||||
"location": 2,
|
||||
"message": 3
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "celery.*",
|
||||
"endsPattern": "ready"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "manage.py runserver",
|
||||
"label": "Start: Frontend Angular",
|
||||
"description": "Start the Frontend Angular Dev Server",
|
||||
"type": "shell",
|
||||
"command": "pipenv run python manage.py runserver",
|
||||
"command": "pnpm start",
|
||||
"isBackground": true,
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src-ui"
|
||||
},
|
||||
"problemMatcher": [
|
||||
{
|
||||
"owner": "custom",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": ".",
|
||||
"file": 1,
|
||||
"location": 2,
|
||||
"message": 3
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": ".*",
|
||||
"endsPattern": "Compiled successfully"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Start: Consumer Service (manage.py document_consumer)",
|
||||
"description": "Start the Consumer Service which processes files from a directory",
|
||||
"type": "shell",
|
||||
"command": "uv run python manage.py document_consumer",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
@@ -37,100 +75,149 @@
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
|
||||
},
|
||||
{
|
||||
"label": "Start: Backend Server (manage.py runserver)",
|
||||
"description": "Start the Backend Server which serves the Django API and the compiled Angular frontend",
|
||||
"type": "shell",
|
||||
"command": "uv run python manage.py runserver",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: manage.py migrate",
|
||||
"type": "shell",
|
||||
"command": "pipenv run python manage.py migrate",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
{
|
||||
"label": "Maintenance: manage.py migrate",
|
||||
"description": "Apply database migrations",
|
||||
"type": "shell",
|
||||
"command": "uv run python manage.py migrate",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: Build Documentation",
|
||||
"description": "Build the documentation with MkDocs",
|
||||
"type": "shell",
|
||||
"command": "uv run mkdocs build --config-file mkdocs.yml && uv run mkdocs serve",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: manage.py createsuperuser",
|
||||
"description": "Create a superuser",
|
||||
"type": "shell",
|
||||
"command": "uv run python manage.py createsuperuser",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: recreate .venv",
|
||||
"description": "Recreate the python virtual environment and install python dependencies",
|
||||
"type": "shell",
|
||||
"command": "rm -R -v .venv/* || uv install --dev",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: Install Frontend Dependencies",
|
||||
"description": "Install frontend (pnpm) dependencies",
|
||||
"type": "pnpm",
|
||||
"script": "install",
|
||||
"path": "src-ui",
|
||||
"group": "clean",
|
||||
"problemMatcher": [],
|
||||
"detail": "install dependencies from package"
|
||||
},
|
||||
{
|
||||
"description": "Clean install frontend dependencies and build the frontend for production",
|
||||
"label": "Maintenance: Compile frontend for production",
|
||||
"type": "shell",
|
||||
"command": "pnpm install && ./node_modules/.bin/ng build --configuration production",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src-ui"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Project Setup: Run all Init Tasks",
|
||||
"description": "Runs all init tasks to setup the project including migrate the database, create a superuser and compile the frontend for production",
|
||||
"dependsOrder": "sequence",
|
||||
"dependsOn": [
|
||||
"Maintenance: manage.py migrate",
|
||||
"Maintenance: manage.py createsuperuser",
|
||||
"Maintenance: Compile frontend for production"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Project Start: Run all Services",
|
||||
"description": "Runs all services required to start the project including the Celery Worker, the Consumer Service and the Backend Server",
|
||||
"dependsOn": [
|
||||
"Start: Celery Worker",
|
||||
"Start: Consumer Service (manage.py document_consumer)",
|
||||
"Start: Backend Server (manage.py runserver)"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: manage.py createsuperuser",
|
||||
"type": "shell",
|
||||
"command": "pipenv run python manage.py createsuperuser",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "compile frontend",
|
||||
"type": "shell",
|
||||
"command": "npm ci && ./node_modules/.bin/ng build --configuration production",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src-ui"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Maintenance: recreate .venv",
|
||||
"type": "shell",
|
||||
"command": "rm -R -v .venv/* || pipenv install --dev",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Celery Worker",
|
||||
"type": "shell",
|
||||
"command": "pipenv run celery --app paperless worker -l DEBUG",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": true,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": true,
|
||||
"revealProblems": "onProblem"
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}/src"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,3 +26,5 @@
./dist
./scripts
./resources
# Other stuff
**/*.drawio.png
@@ -27,9 +27,6 @@ indent_style = space
[*.md]
indent_style = space

[Pipfile.lock]
indent_style = space

# Tests don't get a line width restriction. It's still a good idea to follow
# the 79 character rule, but in the interests of clarity, tests often need to
# violate it.
.github/FUNDING.yml (1, vendored, Normal file)
@@ -0,0 +1 @@
github: [shamoon, stumpylog]
.github/ISSUE_TEMPLATE/config.yml (4, vendored)
@@ -2,10 +2,10 @@ blank_issues_enabled: false
contact_links:
- name: 🤔 Questions and Help
url: https://github.com/paperless-ngx/paperless-ngx/discussions
about: This issue tracker is not for support questions. Please refer to our Discussions.
about: General questions or support for using Paperless-ngx.
- name: 💬 Chat
url: https://matrix.to/#/#paperlessngx:matrix.org
about: Want to discuss Paperless-ngx with others? Check out our chat.
- name: 🚀 Feature Request
url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
about: Remember to search for existing feature requests and "up-vote" any you like
about: Remember to search for existing feature requests and "up-vote" those that you like.
.github/dependabot.yml (67, vendored)
@@ -1,12 +1,14 @@
# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates#package-ecosystem
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
# Required for uv support for now
enable-beta-ecosystems: true
updates:

# Enable version updates for npm
# Enable version updates for pnpm
- package-ecosystem: "npm"
target-branch: "dev"
# Look for `package.json` and `lock` files in the `/src-ui` directory
# Look for `pnpm-lock.yaml` file in the `/src-ui` directory
directory: "/src-ui"
open-pull-requests-limit: 10
schedule:
@@ -32,11 +34,9 @@ updates:
patterns:
- "@typescript-eslint*"
- "eslint"

# Enable version updates for Python
- package-ecosystem: "pip"
- package-ecosystem: "uv"
target-branch: "dev"
# Look for a `Pipfile` in the `root` directory
directory: "/"
# Check for updates once a week
schedule:
@@ -47,14 +47,13 @@ updates:
# Add reviewers
reviewers:
- "paperless-ngx/backend"
ignore:
- dependency-name: "uvicorn"
groups:
development:
patterns:
- "*pytest*"
- "ruff"
- "mkdocs-material"
- "pre-commit*"
django:
patterns:
- "*django*"
@@ -65,7 +64,10 @@ updates:
update-types:
- "minor"
- "patch"

pre-built:
patterns:
- psycopg*
- zxing-cpp
# Enable updates for GitHub Actions
- package-ecosystem: "github-actions"
target-branch: "dev"
@@ -85,3 +87,48 @@ updates:
- "major"
- "minor"
- "patch"
# Update Dockerfile in root directory
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 5
reviewers:
- "paperless-ngx/ci-cd"
labels:
- "ci-cd"
- "dependencies"
commit-message:
prefix: "docker"
include: "scope"
# Update Docker Compose files in docker/compose directory
- package-ecosystem: "docker-compose"
directory: "/docker/compose/"
schedule:
interval: "weekly"
open-pull-requests-limit: 5
reviewers:
- "paperless-ngx/ci-cd"
labels:
- "ci-cd"
- "dependencies"
commit-message:
prefix: "docker-compose"
include: "scope"
groups:
# Individual groups for each image
gotenberg:
patterns:
- "docker.io/gotenberg/gotenberg*"
tika:
patterns:
- "docker.io/apache/tika*"
redis:
patterns:
- "docker.io/library/redis*"
mariadb:
patterns:
- "docker.io/library/mariadb*"
postgres:
patterns:
- "docker.io/library/postgres*"
.github/workflows/ci.yml (460, vendored)
@@ -1,5 +1,4 @@
|
||||
name: ci
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
@@ -12,87 +11,73 @@ on:
|
||||
pull_request:
|
||||
branches-ignore:
|
||||
- 'translations**'
|
||||
|
||||
env:
|
||||
# This is the version of pipenv all the steps will use
|
||||
# If changing this, change Dockerfile
|
||||
DEFAULT_PIP_ENV_VERSION: "2024.4.0"
|
||||
DEFAULT_UV_VERSION: "0.6.x"
|
||||
# This is the default version of Python to use in most steps which aren't specific
|
||||
DEFAULT_PYTHON_VERSION: "3.11"
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
# We want to run on external PRs, but not on our own internal PRs as they'll be run
|
||||
# by the push to the branch. Without this if check, checks are duplicated since
|
||||
# internal PRs match both the push and pull_request events.
|
||||
if:
|
||||
github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=
|
||||
github.repository
|
||||
|
||||
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
|
||||
name: Linting Checks
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout repository
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Install python
|
||||
- name: Install python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Check files
|
||||
- name: Check files
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
documentation:
|
||||
name: "Build & Deploy Documentation"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- pre-commit
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
cache: "pipenv"
|
||||
cache-dependency-path: 'Pipfile.lock'
|
||||
-
|
||||
name: Install pipenv
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
|
||||
-
|
||||
name: Install dependencies
|
||||
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
|
||||
- name: Make documentation
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
|
||||
-
|
||||
name: List installed Python dependencies
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
|
||||
-
|
||||
name: Make documentation
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs build --config-file ./mkdocs.yml
|
||||
-
|
||||
name: Deploy documentation
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
--frozen \
|
||||
mkdocs build --config-file ./mkdocs.yml
|
||||
- name: Deploy documentation
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
run: |
|
||||
echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
|
||||
git config --global user.name "${{ github.actor }}"
|
||||
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs gh-deploy --force --no-history
|
||||
-
|
||||
name: Upload artifact
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
--frozen \
|
||||
mkdocs gh-deploy --force --no-history
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: documentation
|
||||
path: site/
|
||||
retention-days: 7
|
||||
|
||||
tests-backend:
|
||||
name: "Backend Tests (Python ${{ matrix.python-version }})"
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -103,46 +88,40 @@ jobs:
|
||||
python-version: ['3.10', '3.11', '3.12']
|
||||
fail-fast: false
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Start containers
|
||||
- name: Start containers
|
||||
run: |
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "${{ matrix.python-version }}"
|
||||
cache: "pipenv"
|
||||
cache-dependency-path: 'Pipfile.lock'
|
||||
-
|
||||
name: Install pipenv
|
||||
run: |
|
||||
pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
|
||||
-
|
||||
name: Install system dependencies
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
|
||||
-
|
||||
name: Configure ImageMagick
|
||||
- name: Configure ImageMagick
|
||||
run: |
|
||||
sudo cp docker/imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
|
||||
-
|
||||
name: Install Python dependencies
|
||||
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python --version
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
|
||||
-
|
||||
name: List installed Python dependencies
|
||||
uv sync \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--group testing \
|
||||
--frozen
|
||||
- name: List installed Python dependencies
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
|
||||
-
|
||||
name: Tests
|
||||
uv pip list
|
||||
- name: Tests
|
||||
env:
|
||||
PAPERLESS_CI_TEST: 1
|
||||
# Enable paperless_mail testing against real server
|
||||
@@ -150,60 +129,65 @@ jobs:
|
||||
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
|
||||
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
|
||||
run: |
|
||||
cd src/
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
|
||||
-
|
||||
name: Upload coverage
|
||||
if: ${{ matrix.python-version == env.DEFAULT_PYTHON_VERSION }}
|
||||
uses: actions/upload-artifact@v4
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
--frozen \
|
||||
pytest
|
||||
- name: Upload backend test results to Codecov
|
||||
if: always()
|
||||
uses: codecov/test-results-action@v1
|
||||
with:
|
||||
name: backend-coverage-report
|
||||
path: src/coverage.xml
|
||||
retention-days: 7
|
||||
if-no-files-found: warn
|
||||
-
|
||||
name: Stop containers
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend-python-${{ matrix.python-version }}
|
||||
files: junit.xml
|
||||
- name: Upload backend coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend-python-${{ matrix.python-version }}
|
||||
files: coverage.xml
|
||||
- name: Stop containers
|
||||
if: always()
|
||||
run: |
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
|
||||
|
||||
install-frontend-depedendencies:
|
||||
install-frontend-dependencies:
|
||||
name: "Install Frontend Dependencies"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
-
|
||||
name: Use Node.js 20
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: 'npm'
|
||||
cache-dependency-path: 'src-ui/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
|
||||
- name: Cache frontend dependencies
|
||||
id: cache-frontend-deps
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.npm
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
|
||||
-
|
||||
name: Install dependencies
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
- name: Install dependencies
|
||||
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
|
||||
run: cd src-ui && npm ci
|
||||
-
|
||||
name: Install Playwright
|
||||
run: cd src-ui && pnpm install
|
||||
- name: Install Playwright
|
||||
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
|
||||
run: cd src-ui && npx playwright install --with-deps
|
||||
|
||||
run: cd src-ui && pnpm playwright install --with-deps
|
||||
tests-frontend:
|
||||
name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- install-frontend-depedendencies
|
||||
- install-frontend-dependencies
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -212,125 +196,76 @@ jobs:
|
||||
shard-count: [4]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
-
|
||||
name: Use Node.js 20
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: 'npm'
|
||||
cache-dependency-path: 'src-ui/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
|
||||
- name: Cache frontend dependencies
|
||||
id: cache-frontend-deps
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.npm
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
- name: Re-link Angular cli
|
||||
run: cd src-ui && npm link @angular/cli
|
||||
-
|
||||
name: Linting checks
|
||||
run: cd src-ui && npm run lint
|
||||
-
|
||||
name: Run Jest unit tests
|
||||
run: cd src-ui && npm run test -- --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
-
|
||||
name: Upload Jest coverage
|
||||
run: cd src-ui && pnpm link @angular/cli
|
||||
- name: Linting checks
|
||||
run: cd src-ui && pnpm run lint
|
||||
- name: Run Jest unit tests
|
||||
run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
- name: Run Playwright e2e tests
|
||||
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
- name: Upload frontend test results to Codecov
|
||||
uses: codecov/test-results-action@v1
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: jest-coverage-report-${{ matrix.shard-index }}
|
||||
path: |
|
||||
src-ui/coverage/coverage-final.json
|
||||
src-ui/coverage/lcov.info
|
||||
src-ui/coverage/clover.xml
|
||||
retention-days: 7
|
||||
if-no-files-found: warn
|
||||
-
|
||||
name: Run Playwright e2e tests
|
||||
run: cd src-ui && npx playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
-
|
||||
name: Upload Playwright test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend-node-${{ matrix.node-version }}
|
||||
directory: src-ui/
|
||||
- name: Upload frontend coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
name: playwright-report-${{ matrix.shard-index }}
|
||||
path: src-ui/playwright-report
|
||||
retention-days: 7
|
||||
|
||||
tests-coverage-upload:
|
||||
name: "Upload to Codecov"
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend-node-${{ matrix.node-version }}
|
||||
directory: src-ui/coverage/
|
||||
frontend-bundle-analysis:
|
||||
name: "Frontend Bundle Analysis"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- tests-backend
|
||||
- tests-frontend
|
||||
steps:
|
||||
-
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Download frontend jest coverage
|
||||
uses: actions/download-artifact@v4
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
path: src-ui/coverage/
|
||||
pattern: jest-coverage-report-*
|
||||
-
|
||||
name: Download frontend playwright coverage
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: src-ui/coverage/
|
||||
pattern: playwright-report-*
|
||||
merge-multiple: true
|
||||
-
|
||||
name: Upload frontend coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
# not required for public repos, but intermittently fails otherwise
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend
|
||||
directory: src-ui/coverage/
|
||||
# dont include backend coverage files here
|
||||
files: '!coverage.xml'
|
||||
-
|
||||
name: Download backend coverage
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: backend-coverage-report
|
||||
path: src/
|
||||
-
|
||||
name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
# not required for public repos, but intermittently fails otherwise
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
# future expansion
|
||||
flags: backend
|
||||
directory: src/
|
||||
-
|
||||
name: Use Node.js 20
|
||||
version: 10
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: 'npm'
|
||||
cache-dependency-path: 'src-ui/package-lock.json'
|
||||
-
|
||||
name: Cache frontend dependencies
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
|
||||
- name: Cache frontend dependencies
|
||||
id: cache-frontend-deps
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.npm
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
|
||||
-
|
||||
name: Re-link Angular cli
|
||||
run: cd src-ui && npm link @angular/cli
|
||||
-
|
||||
name: Build frontend and upload analysis
|
||||
- name: Re-link Angular cli
|
||||
run: cd src-ui && pnpm link @angular/cli
|
||||
- name: Build frontend and upload analysis
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: cd src-ui && ng build --configuration=production
|
||||
|
||||
run: cd src-ui && pnpm run build --configuration=production
|
||||
build-docker-image:
|
||||
name: Build Docker image for ${{ github.ref_name }}
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -342,8 +277,7 @@ jobs:
|
||||
- tests-backend
|
||||
- tests-frontend
|
||||
steps:
|
||||
-
|
||||
name: Check pushing to Docker Hub
|
||||
- name: Check pushing to Docker Hub
|
||||
id: push-other-places
|
||||
# Only push to Dockerhub from the main repo AND the ref is either:
|
||||
# main
|
||||
@@ -359,15 +293,13 @@ jobs:
|
||||
echo "Not pushing to DockerHub"
|
||||
echo "enable=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
-
|
||||
name: Set ghcr repository name
|
||||
- name: Set ghcr repository name
|
||||
id: set-ghcr-repository
|
||||
run: |
|
||||
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
|
||||
echo "Name is ${ghcr_name}"
|
||||
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
|
||||
-
|
||||
name: Gather Docker metadata
|
||||
- name: Gather Docker metadata
|
||||
id: docker-meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
@@ -382,37 +314,31 @@ jobs:
|
||||
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
# If https://github.com/docker/buildx/issues/1044 is resolved,
|
||||
# the append input with a native arm64 arch could be used to
|
||||
# significantly speed up building
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
-
|
||||
name: Set up QEMU
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: arm64
|
||||
-
|
||||
name: Login to GitHub Container Registry
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Login to Docker Hub
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
# Don't attempt to login if not pushing to Docker Hub
|
||||
if: steps.push-other-places.outputs.enable == 'true'
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
-
|
||||
name: Login to Quay.io
|
||||
- name: Login to Quay.io
|
||||
uses: docker/login-action@v3
|
||||
# Don't attempt to login if not pushing to Quay.io
|
||||
if: steps.push-other-places.outputs.enable == 'true'
|
||||
@@ -420,8 +346,7 @@ jobs:
|
||||
registry: quay.io
|
||||
username: ${{ secrets.QUAY_USERNAME }}
|
||||
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
|
||||
-
|
||||
name: Build and push
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
@@ -439,23 +364,19 @@ jobs:
|
||||
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
|
||||
cache-to: |
|
||||
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
|
||||
-
|
||||
name: Inspect image
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||
-
|
||||
name: Export frontend artifact from docker
|
||||
- name: Export frontend artifact from docker
|
||||
run: |
|
||||
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
|
||||
-
|
||||
name: Upload frontend artifact
|
||||
- name: Upload frontend artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: frontend-compiled
|
||||
path: src/documents/static/frontend/
|
||||
retention-days: 7
|
||||
|
||||
build-release:
|
||||
name: "Build Release"
|
||||
needs:
|
||||
@@ -463,58 +384,52 @@ jobs:
|
||||
- documentation
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
cache: "pipenv"
|
||||
cache-dependency-path: 'Pipfile.lock'
|
||||
-
|
||||
name: Install pipenv + tools
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
|
||||
-
|
||||
name: Install Python dependencies
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
|
||||
-
|
||||
name: Install system dependencies
|
||||
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq --no-install-recommends gettext liblept5
|
||||
-
|
||||
name: Download frontend artifact
|
||||
- name: Download frontend artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: frontend-compiled
|
||||
path: src/documents/static/frontend/
|
||||
-
|
||||
name: Download documentation artifact
|
||||
- name: Download documentation artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: documentation
|
||||
path: docs/_build/html/
|
||||
-
|
||||
name: Generate requirements file
|
||||
- name: Generate requirements file
|
||||
run: |
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} requirements > requirements.txt
|
||||
-
|
||||
name: Compile messages
|
||||
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
|
||||
- name: Compile messages
|
||||
run: |
|
||||
cd src/
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py compilemessages
|
||||
-
|
||||
name: Collect static files
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
manage.py compilemessages
|
||||
- name: Collect static files
|
||||
run: |
|
||||
cd src/
|
||||
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py collectstatic --no-input
|
||||
-
|
||||
name: Move files
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
manage.py collectstatic --no-input
|
||||
- name: Move files
|
||||
run: |
|
||||
echo "Making dist folders"
|
||||
for directory in dist \
|
||||
@@ -528,13 +443,12 @@ jobs:
|
||||
for file_name in .dockerignore \
|
||||
.env \
|
||||
Dockerfile \
|
||||
Pipfile \
|
||||
Pipfile.lock \
|
||||
pyproject.toml \
|
||||
uv.lock \
|
||||
requirements.txt \
|
||||
LICENSE \
|
||||
README.md \
|
||||
paperless.conf.example \
|
||||
gunicorn.conf.py
|
||||
paperless.conf.example
|
||||
do
|
||||
cp --verbose ${file_name} dist/paperless-ngx/
|
||||
done
|
||||
@@ -552,21 +466,18 @@ jobs:
|
||||
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
|
||||
|
||||
mv --verbose static dist/paperless-ngx
|
||||
-
|
||||
name: Make release package
|
||||
- name: Make release package
|
||||
run: |
|
||||
echo "Creating release archive"
|
||||
cd dist
|
||||
sudo chown -R 1000:1000 paperless-ngx/
|
||||
tar -cJf paperless-ngx.tar.xz paperless-ngx/
|
||||
-
|
||||
name: Upload release artifact
|
||||
- name: Upload release artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
path: dist/paperless-ngx.tar.xz
|
||||
retention-days: 7
|
||||
|
||||
publish-release:
|
||||
name: "Publish Release"
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -578,14 +489,12 @@ jobs:
|
||||
- build-release
|
||||
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
|
||||
steps:
|
||||
-
|
||||
name: Download release artifact
|
||||
- name: Download release artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
path: ./
|
||||
-
|
||||
name: Get version
|
||||
- name: Get version
|
||||
id: get_version
|
||||
run: |
|
||||
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
@@ -594,8 +503,7 @@ jobs:
|
||||
else
|
||||
echo "prerelease=false" >> $GITHUB_OUTPUT
|
||||
fi
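The opening of this run block is cut off by the diff context; a plausible reconstruction of the prerelease check, assuming a prerelease is exactly a tag carrying the '-beta.rc' marker from the job's trigger condition (hypothetical, shown for orientation only):

if [[ "${GITHUB_REF_NAME}" == *"-beta.rc"* ]]; then
  echo "prerelease=true" >> "$GITHUB_OUTPUT"
else
  echo "prerelease=false" >> "$GITHUB_OUTPUT"
fi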
|
||||
-
|
||||
name: Create Release and Changelog
|
||||
- name: Create Release and Changelog
|
||||
id: create-release
|
||||
uses: release-drafter/release-drafter@v6
|
||||
with:
|
||||
@@ -606,8 +514,7 @@ jobs:
|
||||
publish: true # ensures release is not marked as draft
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Upload release archive
|
||||
- name: Upload release archive
|
||||
id: upload-release-asset
|
||||
uses: shogo82148/actions-upload-release-asset@v1
|
||||
with:
|
||||
@@ -616,7 +523,6 @@ jobs:
|
||||
asset_path: ./paperless-ngx.tar.xz
|
||||
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
|
||||
asset_content_type: application/x-xz
|
||||
|
||||
append-changelog:
|
||||
name: "Append Changelog"
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -624,24 +530,22 @@ jobs:
|
||||
- publish-release
|
||||
if: needs.publish-release.outputs.prerelease == 'false'
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
cache: "pipenv"
|
||||
cache-dependency-path: 'Pipfile.lock'
|
||||
-
|
||||
name: Install pipenv + tools
|
||||
run: |
|
||||
pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
|
||||
-
|
||||
name: Append Changelog to docs
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
- name: Append Changelog to docs
|
||||
id: append-Changelog
|
||||
working-directory: docs
|
||||
run: |
|
||||
@@ -655,13 +559,15 @@ jobs:
|
||||
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
|
||||
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
|
||||
mv changelog-new.md changelog.md
|
||||
pipenv run pre-commit run --files changelog.md || true
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
pre-commit run --files changelog.md || true
|
||||
git config --global user.name "github-actions"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
|
||||
git push origin ${{ needs.publish-release.outputs.version }}-changelog
|
||||
-
|
||||
name: Create Pull Request
|
||||
- name: Create Pull Request
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
|
||||
14
.github/workflows/cleanup-tags.yml
vendored
@@ -6,17 +6,14 @@
|
||||
# This workflow will not trigger runs on forked repos.
|
||||
|
||||
name: Cleanup Image Tags
|
||||
|
||||
on:
|
||||
delete:
|
||||
push:
|
||||
paths:
|
||||
- ".github/workflows/cleanup-tags.yml"
|
||||
|
||||
concurrency:
|
||||
group: registry-tags-cleanup
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
cleanup-images:
|
||||
name: Cleanup Image Tags for ${{ matrix.primary-name }}
|
||||
@@ -30,10 +27,9 @@ jobs:
|
||||
# Requires a personal access token with the OAuth scope delete:packages
|
||||
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
|
||||
steps:
|
||||
-
|
||||
name: Clean temporary images
|
||||
- name: Clean temporary images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
|
||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "${{ github.repository_owner }}"
|
||||
@@ -43,7 +39,6 @@ jobs:
|
||||
repo_name: "paperless-ngx"
|
||||
match_regex: "(feature|fix)"
|
||||
do_delete: "true"
|
||||
|
||||
cleanup-untagged-images:
|
||||
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
|
||||
if: github.repository_owner == 'paperless-ngx'
|
||||
@@ -58,10 +53,9 @@ jobs:
|
||||
# Requires a personal access token with the OAuth scope delete:packages
|
||||
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
|
||||
steps:
|
||||
-
|
||||
name: Clean untagged images
|
||||
- name: Clean untagged images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/untagged@v0.9.0
|
||||
uses: stumpylog/image-cleaner-action/untagged@v0.10.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "${{ github.repository_owner }}"
|
||||
|
||||
38
.github/workflows/codeql-analysis.yml
vendored
@@ -10,16 +10,14 @@
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main, dev ]
|
||||
branches: [main, dev]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ dev ]
|
||||
branches: [dev]
|
||||
schedule:
|
||||
- cron: '28 13 * * 5'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
@@ -28,27 +26,23 @@ jobs:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript', 'python' ]
|
||||
language: ['javascript', 'python']
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
|
||||
39
.github/workflows/crowdin.yml
vendored
@@ -1,35 +1,28 @@
|
||||
name: Crowdin Action
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '2 */12 * * *'
|
||||
push:
|
||||
paths: [
|
||||
'src/locale/**',
|
||||
'src-ui/messages.xlf',
|
||||
'src-ui/src/locale/**'
|
||||
]
|
||||
branches: [ dev ]
|
||||
|
||||
paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
|
||||
branches: [dev]
|
||||
jobs:
|
||||
synchronize-with-crowdin:
|
||||
name: Crowdin Sync
|
||||
if: github.repository_owner == 'paperless-ngx'
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: crowdin action
|
||||
uses: crowdin/github-action@v2
|
||||
with:
|
||||
upload_translations: false
|
||||
download_translations: true
|
||||
crowdin_branch_name: 'dev'
|
||||
localization_branch_name: l10n_dev
|
||||
pull_request_labels: 'skip-changelog, translation'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
|
||||
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: crowdin action
|
||||
uses: crowdin/github-action@v2
|
||||
with:
|
||||
upload_translations: false
|
||||
download_translations: true
|
||||
crowdin_branch_name: 'dev'
|
||||
localization_branch_name: l10n_dev
|
||||
pull_request_labels: 'skip-changelog, translation'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
|
||||
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
|
||||
|
||||
3
.github/workflows/project-actions.yml
vendored
@@ -1,5 +1,4 @@
|
||||
name: Project Automations
|
||||
|
||||
on:
|
||||
pull_request_target: #_target allows access to secrets
|
||||
types:
|
||||
@@ -8,10 +7,8 @@ on:
|
||||
branches:
|
||||
- main
|
||||
- dev
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
pr_opened_or_reopened:
|
||||
name: pr_opened_or_reopened
|
||||
|
||||
27
.github/workflows/repo-maintenance.yml
vendored
@@ -1,18 +1,14 @@
|
||||
name: 'Repository Maintenance'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 3 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
discussions: write
|
||||
|
||||
concurrency:
|
||||
group: lock
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
name: 'Stale'
|
||||
@@ -27,9 +23,8 @@ jobs:
|
||||
stale-issue-label: stale
|
||||
stale-pr-label: stale
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
|
||||
lock-threads:
|
||||
name: 'Lock Old Threads'
|
||||
if: github.repository_owner == 'paperless-ngx'
|
||||
@@ -42,20 +37,14 @@ jobs:
|
||||
discussion-inactive-days: '30'
|
||||
log-output: true
|
||||
issue-comment: >
|
||||
This issue has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new discussion or issue for related concerns.
|
||||
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
This issue has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
|
||||
pr-comment: >
|
||||
This pull request has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new discussion or issue for related concerns.
|
||||
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
This pull request has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
|
||||
discussion-comment: >
|
||||
This discussion has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new discussion for related concerns.
|
||||
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
This discussion has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
|
||||
|
||||
close-answered-discussions:
|
||||
name: 'Close Answered Discussions'
|
||||
if: github.repository_owner == 'paperless-ngx'
|
||||
|
||||
7
.gitignore
vendored
@@ -44,6 +44,7 @@ nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.pytest_cache
|
||||
junit.xml
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
@@ -100,3 +101,9 @@ scripts/nuke
|
||||
|
||||
# celery schedule file
|
||||
celerybeat-schedule*
|
||||
|
||||
# ignore .devcontainer sub folders
|
||||
/.devcontainer/consume/
|
||||
/.devcontainer/data/
|
||||
/.devcontainer/media/
|
||||
/.devcontainer/redisdata/
|
||||
|
||||
@@ -29,10 +29,10 @@ repos:
|
||||
- id: check-case-conflict
|
||||
- id: detect-private-key
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.3.0
|
||||
rev: v2.4.0
|
||||
hooks:
|
||||
- id: codespell
|
||||
exclude: "(^src-ui/src/locale/)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
|
||||
exclude: "(^src-ui/src/locale/)|(^src-ui/pnpm-lock.yaml)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
|
||||
exclude_types:
|
||||
- pofile
|
||||
- json
|
||||
@@ -45,16 +45,19 @@ repos:
|
||||
- javascript
|
||||
- ts
|
||||
- markdown
|
||||
exclude: "(^Pipfile\\.lock$)"
|
||||
additional_dependencies:
|
||||
- prettier@3.3.3
|
||||
- 'prettier-plugin-organize-imports@4.1.0'
|
||||
# Python hooks
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.3
|
||||
rev: v0.9.9
|
||||
hooks:
|
||||
- id: ruff
|
||||
- id: ruff-format
|
||||
- repo: https://github.com/tox-dev/pyproject-fmt
|
||||
rev: "v2.5.1"
|
||||
hooks:
|
||||
- id: pyproject-fmt
|
||||
# Dockerfile hooks
|
||||
- repo: https://github.com/AleksaC/hadolint-py
|
||||
rev: v2.12.0.3
|
||||
@@ -73,3 +76,8 @@ repos:
|
||||
rev: "v0.10.0.1"
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
- repo: https://github.com/google/yamlfmt
|
||||
rev: v0.14.0
|
||||
hooks:
|
||||
- id: yamlfmt
|
||||
exclude: "^src-ui/pnpm-lock.yaml"
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
3.10.15
|
||||
91
.ruff.toml
@@ -1,91 +0,0 @@
|
||||
fix = true
|
||||
line-length = 88
|
||||
respect-gitignore = true
|
||||
src = ["src"]
|
||||
target-version = "py310"
|
||||
output-format = "grouped"
|
||||
show-fixes = true
|
||||
|
||||
# https://docs.astral.sh/ruff/settings/
|
||||
# https://docs.astral.sh/ruff/rules/
|
||||
[lint]
|
||||
extend-select = [
|
||||
"W", # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w
|
||||
"I", # https://docs.astral.sh/ruff/rules/#isort-i
|
||||
"UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up
|
||||
"COM", # https://docs.astral.sh/ruff/rules/#flake8-commas-com
|
||||
"DJ", # https://docs.astral.sh/ruff/rules/#flake8-django-dj
|
||||
"EXE", # https://docs.astral.sh/ruff/rules/#flake8-executable-exe
|
||||
"ISC", # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc
|
||||
"ICN", # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn
|
||||
"G201", # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g
|
||||
"INP", # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp
|
||||
"PIE", # https://docs.astral.sh/ruff/rules/#flake8-pie-pie
|
||||
"Q", # https://docs.astral.sh/ruff/rules/#flake8-quotes-q
|
||||
"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
|
||||
"T20", # https://docs.astral.sh/ruff/rules/#flake8-print-t20
|
||||
"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
|
||||
"TID", # https://docs.astral.sh/ruff/rules/#flake8-tidy-imports-tid
|
||||
"TCH", # https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch
|
||||
"PLC", # https://docs.astral.sh/ruff/rules/#pylint-pl
|
||||
"PLE", # https://docs.astral.sh/ruff/rules/#pylint-pl
|
||||
"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
|
||||
"FLY", # https://docs.astral.sh/ruff/rules/#flynt-fly
|
||||
"PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
|
||||
]
|
||||
ignore = ["DJ001", "SIM105", "RUF012"]
|
||||
|
||||
[lint.per-file-ignores]
|
||||
".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
|
||||
"docker/wait-for-redis.py" = ["INP001", "T201"]
|
||||
"src/documents/barcodes.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/classifier.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/consumer.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/file_handling.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/index.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/management/commands/decrypt_documents.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/management/commands/document_consumer.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/management/commands/document_exporter.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/management/commands/document_importer.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/migrations/0012_auto_20160305_0040.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/migrations/0014_document_checksum.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/migrations/1003_mime_types.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/migrations/1012_fix_archive_files.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/migrations/1037_webp_encrypted_thumbnail_conversion.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/models.py" = ["SIM115", "PTH"] # TODO PTH Enable & remove
|
||||
"src/documents/parsers.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/signals/handlers.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tasks.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_api_app_config.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_api_bulk_download.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_api_documents.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_classifier.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_consumer.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_file_handling.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_management.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_management_consumer.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_management_exporter.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_management_thumbnails.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_migration_archive_files.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_migration_document_pages_count.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_migration_mime_type.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_sanity_check.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_tasks.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/tests/test_views.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/documents/views.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless/checks.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless/settings.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless/tests/test_checks.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless/urls.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless/views.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless_mail/mail.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless_mail/preprocessor.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless_tesseract/parsers.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless_tesseract/tests/test_parser.py" = ["RUF001", "PTH"] # TODO PTH Enable & remove
|
||||
"src/paperless_tika/tests/test_live_tika.py" = ["PTH"] # TODO Enable & remove
|
||||
"src/paperless_tika/tests/test_tika_parser.py" = ["PTH"] # TODO Enable & remove
|
||||
"*/tests/*.py" = ["E501", "SIM117"]
|
||||
"*/migrations/*.py" = ["E501", "SIM", "T201"]
|
||||
|
||||
[lint.isort]
|
||||
force-single-line = true
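.ruff.toml is deleted in this change; the same lint rules presumably move into a [tool.ruff] table in pyproject.toml (not visible in this diff), and invocation is unchanged either way. A usage sketch, assuming ruff remains in the dev dependency group:

uv run ruff check src/    # lint with the project configuration
uv run ruff format src/   # apply the formatter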
|
||||
@@ -5,5 +5,6 @@
|
||||
/src-ui/ @paperless-ngx/frontend
|
||||
|
||||
/src/ @paperless-ngx/backend
|
||||
Pipfile* @paperless-ngx/backend
|
||||
pyproject.toml @paperless-ngx/backend
|
||||
uv.lock @paperless-ngx/backend
|
||||
*.py @paperless-ngx/backend
|
||||
|
||||
182
Dockerfile
@@ -4,15 +4,17 @@
|
||||
# Stage: compile-frontend
|
||||
# Purpose: Compiles the frontend
|
||||
# Notes:
|
||||
# - Does NPM stuff with Typescript and such
|
||||
# - Does PNPM stuff with Typescript and such
|
||||
FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend
|
||||
|
||||
COPY ./src-ui /src/src-ui
|
||||
|
||||
WORKDIR /src/src-ui
|
||||
RUN set -eux \
|
||||
&& npm update npm -g \
|
||||
&& npm ci
|
||||
&& npm update -g pnpm \
|
||||
&& npm install -g corepack@latest \
|
||||
&& corepack enable \
|
||||
&& pnpm install
|
||||
|
||||
ARG PNGX_TAG_VERSION=
|
||||
# Add the tag to the environment file if it's a tagged dev build
|
||||
@@ -26,28 +28,66 @@ esac
|
||||
RUN set -eux \
|
||||
&& ./node_modules/.bin/ng build --configuration production
|
||||
|
||||
# Stage: pipenv-base
|
||||
# Purpose: Generates a requirements.txt file for building
|
||||
# Stage: s6-overlay-base
|
||||
# Purpose: Installs s6-overlay and rootfs
|
||||
# Comments:
|
||||
# - pipenv dependencies are not left in the final image
|
||||
# - pipenv can't touch the final image somehow
|
||||
FROM --platform=$BUILDPLATFORM docker.io/python:3.12-alpine AS pipenv-base
|
||||
# - Don't leave anything extra in here either
|
||||
FROM ghcr.io/astral-sh/uv:0.6.5-python3.12-bookworm-slim AS s6-overlay-base
|
||||
|
||||
WORKDIR /usr/src/pipenv
|
||||
WORKDIR /usr/src/s6
|
||||
|
||||
COPY Pipfile* ./
|
||||
# https://github.com/just-containers/s6-overlay#customizing-s6-overlay-behaviour
|
||||
ENV \
|
||||
S6_BEHAVIOUR_IF_STAGE2_FAILS=2 \
|
||||
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
|
||||
S6_VERBOSITY=1 \
|
||||
PATH=/command:$PATH
|
||||
|
||||
# Buildx provided, must be defined to use though
|
||||
ARG TARGETARCH
|
||||
ARG TARGETVARIANT
|
||||
# Lock this version
|
||||
ARG S6_OVERLAY_VERSION=3.2.0.2
|
||||
|
||||
ARG S6_BUILD_TIME_PKGS="curl \
|
||||
xz-utils"
|
||||
|
||||
RUN set -eux \
|
||||
&& echo "Installing pipenv" \
|
||||
&& python3 -m pip install --no-cache-dir --upgrade pipenv==2024.4.0 \
|
||||
&& echo "Generating requirement.txt" \
|
||||
&& pipenv requirements > requirements.txt
|
||||
&& echo "Installing build time packages" \
|
||||
&& apt-get update \
|
||||
&& apt-get install --yes --quiet --no-install-recommends ${S6_BUILD_TIME_PKGS} \
|
||||
&& echo "Determining arch" \
|
||||
&& S6_ARCH="" \
|
||||
&& if [ "${TARGETARCH}${TARGETVARIANT}" = "amd64" ]; then S6_ARCH="x86_64"; \
|
||||
elif [ "${TARGETARCH}${TARGETVARIANT}" = "arm64" ]; then S6_ARCH="aarch64"; fi\
|
||||
&& if [ -z "${S6_ARCH}" ]; then { echo "Error: Not able to determine arch"; exit 1; }; fi \
|
||||
&& echo "Installing s6-overlay for ${S6_ARCH}" \
|
||||
&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
|
||||
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz" \
|
||||
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz.sha256" \
|
||||
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz" \
|
||||
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz.sha256" \
|
||||
&& echo "Validating s6-archive checksums" \
|
||||
&& sha256sum --check ./*.sha256 \
|
||||
&& echo "Unpacking archives" \
|
||||
&& tar --directory / -Jxpf s6-overlay-noarch.tar.xz \
|
||||
&& tar --directory / -Jxpf s6-overlay-${S6_ARCH}.tar.xz \
|
||||
&& echo "Removing downloaded archives" \
|
||||
&& rm ./*.tar.xz \
|
||||
&& rm ./*.sha256 \
|
||||
&& echo "Cleaning up image" \
|
||||
&& apt-get --yes purge ${S6_BUILD_TIME_PKGS} \
|
||||
&& apt-get --yes autoremove --purge \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy our service defs and filesystem
|
||||
COPY ./docker/rootfs /
|
||||
|
||||
# Stage: main-app
|
||||
# Purpose: The final image
|
||||
# Comments:
|
||||
# - Don't leave anything extra in here
|
||||
FROM docker.io/python:3.12-slim-bookworm AS main-app
|
||||
FROM s6-overlay-base AS main-app
|
||||
|
||||
LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
|
||||
LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
|
||||
@@ -61,16 +101,19 @@ ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETARCH
|
||||
|
||||
# Can be workflow provided, defaults set for manual building
|
||||
ARG JBIG2ENC_VERSION=0.29
|
||||
ARG JBIG2ENC_VERSION=0.30
|
||||
ARG QPDF_VERSION=11.9.0
|
||||
ARG GS_VERSION=10.03.1
|
||||
|
||||
# Set Python environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
# Ignore warning from Whitenoise
|
||||
# Ignore warning from Whitenoise about async iterators
|
||||
PYTHONWARNINGS="ignore:::django.http.response:517" \
|
||||
PNGX_CONTAINERIZED=1
|
||||
PNGX_CONTAINERIZED=1 \
|
||||
# https://docs.astral.sh/uv/reference/settings/#link-mode
|
||||
UV_LINK_MODE=copy \
|
||||
UV_CACHE_DIR=/cache/uv/
|
||||
|
||||
#
|
||||
# Begin installation and configuration
|
||||
@@ -127,118 +170,51 @@ RUN set -eux \
|
||||
&& apt-get update \
|
||||
&& apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
|
||||
&& echo "Installing pre-built updates" \
|
||||
&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
|
||||
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& echo "Installing qpdf ${QPDF_VERSION}" \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& echo "Installing Ghostscript ${GS_VERSION}" \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
|
||||
&& dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
|
||||
&& dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
&& dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
|
||||
&& echo "Installing jbig2enc" \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
|
||||
https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
|
||||
&& echo "Configuring imagemagick" \
|
||||
&& cp /etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml \
|
||||
&& echo "Cleaning up image layer" \
|
||||
&& rm --force --verbose *.deb \
|
||||
&& rm --recursive --force --verbose /var/lib/apt/lists/* \
|
||||
&& echo "Installing supervisor" \
|
||||
&& python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.5
|
||||
|
||||
# Copy gunicorn config
|
||||
# Changes very infrequently
|
||||
WORKDIR /usr/src/paperless/
|
||||
|
||||
COPY gunicorn.conf.py .
|
||||
|
||||
# setup docker-specific things
|
||||
# These change sometimes, but rarely
|
||||
WORKDIR /usr/src/paperless/src/docker/
|
||||
|
||||
COPY [ \
|
||||
"docker/imagemagick-policy.xml", \
|
||||
"docker/supervisord.conf", \
|
||||
"docker/docker-entrypoint.sh", \
|
||||
"docker/docker-prepare.sh", \
|
||||
"docker/paperless_cmd.sh", \
|
||||
"docker/wait-for-redis.py", \
|
||||
"docker/env-from-file.sh", \
|
||||
"docker/management_script.sh", \
|
||||
"docker/flower-conditional.sh", \
|
||||
"docker/install_management_commands.sh", \
|
||||
"/usr/src/paperless/src/docker/" \
|
||||
]
|
||||
|
||||
RUN set -eux \
|
||||
&& echo "Configuring ImageMagick" \
|
||||
&& mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
|
||||
&& echo "Configuring supervisord" \
|
||||
&& mkdir /var/log/supervisord /var/run/supervisord \
|
||||
&& mv supervisord.conf /etc/supervisord.conf \
|
||||
&& echo "Setting up Docker scripts" \
|
||||
&& mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
|
||||
&& chmod 755 /sbin/docker-entrypoint.sh \
|
||||
&& mv docker-prepare.sh /sbin/docker-prepare.sh \
|
||||
&& chmod 755 /sbin/docker-prepare.sh \
|
||||
&& mv wait-for-redis.py /sbin/wait-for-redis.py \
|
||||
&& chmod 755 /sbin/wait-for-redis.py \
|
||||
&& mv env-from-file.sh /sbin/env-from-file.sh \
|
||||
&& chmod 755 /sbin/env-from-file.sh \
|
||||
&& mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
|
||||
&& chmod 755 /usr/local/bin/paperless_cmd.sh \
|
||||
&& mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
|
||||
&& chmod 755 /usr/local/bin/flower-conditional.sh \
|
||||
&& echo "Installing management commands" \
|
||||
&& chmod +x install_management_commands.sh \
|
||||
&& ./install_management_commands.sh
|
||||
&& rm --recursive --force --verbose /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /usr/src/paperless/src/
|
||||
|
||||
# Python dependencies
|
||||
# Change pretty frequently
|
||||
COPY --from=pipenv-base /usr/src/pipenv/requirements.txt ./
|
||||
COPY --chown=1000:1000 ["pyproject.toml", "uv.lock", "/usr/src/paperless/src/"]
|
||||
|
||||
# Packages needed only for building a few quick Python
|
||||
# dependencies
|
||||
ARG BUILD_PACKAGES="\
|
||||
build-essential \
|
||||
git \
|
||||
# https://www.psycopg.org/docs/install.html#prerequisites
|
||||
libpq-dev \
|
||||
# https://github.com/PyMySQL/mysqlclient#linux
|
||||
default-libmysqlclient-dev \
|
||||
pkg-config"
|
||||
|
||||
# hadolint ignore=DL3042
|
||||
RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
|
||||
RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
|
||||
set -eux \
|
||||
&& echo "Installing build system packages" \
|
||||
&& apt-get update \
|
||||
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
|
||||
&& python3 -m pip install --no-cache-dir --upgrade wheel \
|
||||
&& echo "Installing Python requirements" \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output psycopg_c-3.2.3-cp312-cp312-linux_x86_64.whl \
|
||||
https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.3/psycopg_c-3.2.3-cp312-cp312-linux_x86_64.whl \
|
||||
&& curl --fail --silent --show-error --location \
|
||||
--output psycopg_c-3.2.3-cp312-cp312-linux_aarch64.whl \
|
||||
https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.3/psycopg_c-3.2.3-cp312-cp312-linux_aarch64.whl \
|
||||
&& python3 -m pip install --default-timeout=1000 --find-links . --requirement requirements.txt \
|
||||
&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
|
||||
&& uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
|
||||
&& echo "Installing NLTK data" \
|
||||
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
|
||||
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
|
||||
@@ -276,18 +252,16 @@ RUN set -eux \
|
||||
&& echo "Adjusting all permissions" \
|
||||
&& chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \
|
||||
&& echo "Collecting static files" \
|
||||
&& gosu paperless python3 manage.py collectstatic --clear --no-input --link \
|
||||
&& gosu paperless python3 manage.py compilemessages
|
||||
&& s6-setuidgid paperless python3 manage.py collectstatic --clear --no-input --link \
|
||||
&& s6-setuidgid paperless python3 manage.py compilemessages
|
||||
|
||||
VOLUME ["/usr/src/paperless/data", \
|
||||
"/usr/src/paperless/media", \
|
||||
"/usr/src/paperless/consume", \
|
||||
"/usr/src/paperless/export"]
|
||||
|
||||
ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
|
||||
ENTRYPOINT ["/init"]
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["/usr/local/bin/paperless_cmd.sh"]
|
||||
|
||||
HEALTHCHECK --interval=30s --timeout=10s --retries=5 CMD [ "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000" ]
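With the defaults declared in the Dockerfile above, a local image build from the repository root would pass the same build arguments CI supplies (the tag name is illustrative):

docker build \
  --build-arg JBIG2ENC_VERSION=0.30 \
  --build-arg QPDF_VERSION=11.9.0 \
  --build-arg GS_VERSION=10.03.1 \
  --tag paperless-ngx:local .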
|
||||
|
||||
100
Pipfile
@@ -1,100 +0,0 @@
|
||||
[[source]]
|
||||
url = "https://pypi.python.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
dateparser = "~=1.2"
|
||||
# WARNING: django does not use semver.
|
||||
# Only patch versions are guaranteed to not introduce breaking changes.
|
||||
django = "~=5.1.4"
|
||||
django-allauth = {extras = ["mfa", "socialaccount"], version = "*"}
|
||||
django-auditlog = "*"
|
||||
django-celery-results = "*"
|
||||
django-compression-middleware = "*"
|
||||
django-cors-headers = "*"
|
||||
django-extensions = "*"
|
||||
django-filter = "~=24.3"
|
||||
django-guardian = "*"
|
||||
django-multiselectfield = "*"
|
||||
django-soft-delete = "*"
|
||||
djangorestframework = "~=3.15.2"
|
||||
djangorestframework-guardian = "*"
|
||||
drf-writable-nested = "*"
|
||||
bleach = "*"
|
||||
celery = {extras = ["redis"], version = "*"}
|
||||
channels = "~=4.2"
|
||||
channels-redis = "*"
|
||||
concurrent-log-handler = "*"
|
||||
filelock = "*"
|
||||
flower = "*"
|
||||
gotenberg-client = "*"
|
||||
gunicorn = "*"
|
||||
httpx-oauth = "*"
|
||||
imap-tools = "*"
|
||||
inotifyrecursive = "~=0.3"
|
||||
jinja2 = "~=3.1"
|
||||
langdetect = "*"
|
||||
mysqlclient = "*"
|
||||
nltk = "*"
|
||||
ocrmypdf = "~=16.7"
|
||||
pathvalidate = "*"
|
||||
pdf2image = "*"
|
||||
psycopg = {version = "*", extras = ["c"]}
|
||||
python-dateutil = "*"
|
||||
python-dotenv = "*"
|
||||
python-gnupg = "*"
|
||||
python-ipware = "*"
|
||||
python-magic = "*"
|
||||
pyzbar = "*"
|
||||
rapidfuzz = "*"
|
||||
redis = {extras = ["hiredis"], version = "*"}
|
||||
scikit-learn = "~=1.6"
|
||||
setproctitle = "*"
|
||||
tika-client = "*"
|
||||
tqdm = "*"
|
||||
# See https://github.com/paperless-ngx/paperless-ngx/issues/5494
|
||||
uvicorn = {extras = ["standard"], version = "==0.25.0"}
|
||||
watchdog = "~=6.0"
|
||||
whitenoise = "~=6.8"
|
||||
whoosh = "~=2.7"
|
||||
zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
|
||||
|
||||
|
||||
[dev-packages]
|
||||
# Linting
|
||||
pre-commit = "*"
|
||||
ruff = "*"
|
||||
factory-boy = "*"
|
||||
# Testing
|
||||
pytest = "*"
|
||||
pytest-cov = "*"
|
||||
pytest-django = "*"
|
||||
pytest-httpx = "*"
|
||||
pytest-env = "*"
|
||||
pytest-sugar = "*"
|
||||
pytest-xdist = "*"
|
||||
pytest-mock = "*"
|
||||
pytest-rerunfailures = "*"
|
||||
imagehash = "*"
|
||||
daphne = "*"
|
||||
# Documentation
|
||||
mkdocs-material = "*"
|
||||
mkdocs-glightbox = "*"
|
||||
|
||||
[typing-dev]
|
||||
mypy = "*"
|
||||
types-Pillow = "*"
|
||||
django-filter-stubs = "*"
|
||||
types-python-dateutil = "*"
|
||||
djangorestframework-stubs = {extras= ["compatible-mypy"], version="*"}
|
||||
celery-types = "*"
|
||||
django-stubs = {extras= ["compatible-mypy"], version="*"}
|
||||
types-dateparser = "*"
|
||||
types-bleach = "*"
|
||||
types-redis = "*"
|
||||
types-tqdm = "*"
|
||||
types-Markdown = "*"
|
||||
types-Pygments = "*"
|
||||
types-colorama = "*"
|
||||
types-setuptools = "*"
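The Pipfile and its lock file are removed in favour of uv, so routine dependency changes would presumably be made against pyproject.toml and uv.lock instead. An illustrative sketch, with a constraint borrowed from the old Pipfile:

uv add "django~=5.1.4"                  # add or re-pin a runtime dependency
uv lock --upgrade-package django        # refresh only that entry in uv.lock
uv sync --dev                           # install everything, including dev groups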
|
||||
4861
Pipfile.lock
generated
File diff suppressed because it is too large
@@ -55,7 +55,7 @@ A full list of [features](https://docs.paperless-ngx.com/#features) and [screens
|
||||
|
||||
# Getting started
|
||||
|
||||
The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from GitHub Packages.
|
||||
The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from the GitHub container registry.
|
||||
|
||||
If you'd like to jump right in, you can configure a `docker compose` environment with our install script:
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
|
||||
services:
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.7
|
||||
image: docker.io/gotenberg/gotenberg:8.17
|
||||
hostname: gotenberg
|
||||
container_name: gotenberg
|
||||
network_mode: host
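After bumping the Gotenberg image tag as above, an existing compose deployment only needs that one service pulled and recreated:

docker compose pull gotenberg
docker compose up --detach gotenberg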
|
||||
|
||||
@@ -1,26 +1,17 @@
|
||||
###############################################################################
|
||||
# Paperless-ngx settings #
|
||||
###############################################################################
|
||||
|
||||
# See http://docs.paperless-ngx.com/configuration/ for all available options.
|
||||
|
||||
# The UID and GID of the user used to run paperless in the container. Set this
|
||||
# to your UID and GID on the host so that you have write access to the
|
||||
# consumption directory.
|
||||
#USERMAP_UID=1000
|
||||
#USERMAP_GID=1000
|
||||
|
||||
# Additional languages to install for text recognition, separated by a
|
||||
# whitespace. Note that this is
|
||||
# different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines the
|
||||
# language used for OCR.
|
||||
# The container installs English, German, Italian, Spanish and French by
|
||||
# default.
|
||||
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster
|
||||
# for available languages.
|
||||
#PAPERLESS_OCR_LANGUAGES=tur ces
|
||||
|
||||
###############################################################################
|
||||
# Paperless-specific settings #
|
||||
###############################################################################
|
||||
|
||||
# All settings defined in the paperless.conf.example can be used here. The
|
||||
# Docker setup does not use the configuration file.
|
||||
# A few commonly adjusted settings are provided below.
|
||||
# See the documentation linked above for all options. A few commonly adjusted settings
|
||||
# are provided below.
|
||||
|
||||
# This is required if you will be exposing Paperless-ngx on a public domain
|
||||
# (if doing so please consider security measures such as reverse proxy)
|
||||
@@ -30,13 +21,17 @@
|
||||
# be a very long sequence of random characters. You don't need to remember it.
|
||||
#PAPERLESS_SECRET_KEY=change-me
|
||||
|
||||
# Use this variable to set a timezone for the Paperless Docker containers. If not specified, defaults to UTC.
|
||||
# Use this variable to set a timezone for the Paperless Docker containers. Defaults to UTC.
|
||||
#PAPERLESS_TIME_ZONE=America/Los_Angeles
|
||||
|
||||
# The default language to use for OCR. Set this to the language most of your
|
||||
# documents are written in.
|
||||
#PAPERLESS_OCR_LANGUAGE=eng
|
||||
|
||||
# Set if accessing paperless via a domain subpath e.g. https://domain.com/PATHPREFIX and using a reverse-proxy like traefik or nginx
|
||||
#PAPERLESS_FORCE_SCRIPT_NAME=/PATHPREFIX
|
||||
#PAPERLESS_STATIC_URL=/PATHPREFIX/static/ # trailing slash required
|
||||
# Additional languages to install for text recognition, separated by a whitespace.
|
||||
# Note that this is different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines
|
||||
# the language used for OCR.
|
||||
# The container installs English, German, Italian, Spanish and French by default.
|
||||
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster
|
||||
# for available languages.
|
||||
#PAPERLESS_OCR_LANGUAGES=tur ces
|
||||
|
||||
@@ -36,7 +36,6 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/mariadb:11
|
||||
restart: unless-stopped
|
||||
@@ -48,7 +47,6 @@ services:
|
||||
MARIADB_USER: paperless
|
||||
MARIADB_PASSWORD: paperless
|
||||
MARIADB_ROOT_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -75,9 +73,8 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.7
|
||||
image: docker.io/gotenberg/gotenberg:8.17
|
||||
restart: unless-stopped
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
@@ -85,11 +82,9 @@ services:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -32,7 +32,6 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/mariadb:11
|
||||
restart: unless-stopped
|
||||
@@ -44,7 +43,6 @@ services:
|
||||
MARIADB_USER: paperless
|
||||
MARIADB_PASSWORD: paperless
|
||||
MARIADB_ROOT_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -66,7 +64,6 @@ services:
|
||||
PAPERLESS_DBUSER: paperless # only needed if non-default username
|
||||
PAPERLESS_DBPASS: paperless # only needed if non-default password
|
||||
PAPERLESS_DBPORT: 3306
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -36,9 +36,8 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:16
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
@@ -46,7 +45,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -65,7 +63,6 @@ services:
|
||||
PAPERLESS_DBHOST: db
|
||||
env_file:
|
||||
- stack.env
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -36,9 +36,8 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:16
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
@@ -46,7 +45,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -69,22 +67,18 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.7
|
||||
image: docker.io/gotenberg/gotenberg:8.17
|
||||
restart: unless-stopped
|
||||
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
command:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -32,9 +32,8 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:16
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
@@ -42,7 +41,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -60,7 +58,6 @@ services:
|
||||
environment:
|
||||
PAPERLESS_REDIS: redis://broker:6379
|
||||
PAPERLESS_DBHOST: db
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -36,7 +36,6 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -57,22 +56,18 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.7
|
||||
image: docker.io/gotenberg/gotenberg:8.17
|
||||
restart: unless-stopped
|
||||
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
command:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -29,7 +29,6 @@ services:
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@@ -45,7 +44,6 @@ services:
|
||||
env_file: docker-compose.env
|
||||
environment:
|
||||
PAPERLESS_REDIS: redis://broker:6379
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
wait_for_postgres() {
|
||||
local attempt_num=1
|
||||
local -r max_attempts=5
|
||||
|
||||
echo "Waiting for PostgreSQL to start..."
|
||||
|
||||
local -r host="${PAPERLESS_DBHOST:-localhost}"
|
||||
local -r port="${PAPERLESS_DBPORT:-5432}"
|
||||
|
||||
# Disable warning, host and port can't have spaces
|
||||
# shellcheck disable=SC2086
|
||||
while [ ! "$(pg_isready --host ${host} --port ${port})" ]; do
|
||||
|
||||
if [ $attempt_num -eq $max_attempts ]; then
|
||||
echo "Unable to connect to database."
|
||||
exit 1
|
||||
else
|
||||
echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
|
||||
fi
|
||||
|
||||
attempt_num=$(("$attempt_num" + 1))
|
||||
sleep 5
|
||||
done
|
||||
echo "Connected to PostgreSQL"
|
||||
}
|
||||
|
||||
wait_for_mariadb() {
|
||||
echo "Waiting for MariaDB to start..."
|
||||
|
||||
local -r host="${PAPERLESS_DBHOST:=localhost}"
|
||||
local -r port="${PAPERLESS_DBPORT:=3306}"
|
||||
|
||||
local attempt_num=1
|
||||
local -r max_attempts=5
|
||||
|
||||
# Disable warning, host and port can't have spaces
|
||||
# shellcheck disable=SC2086
|
||||
while ! true > /dev/tcp/$host/$port; do
|
||||
|
||||
if [ $attempt_num -eq $max_attempts ]; then
|
||||
echo "Unable to connect to database."
|
||||
exit 1
|
||||
else
|
||||
echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
|
||||
|
||||
fi
|
||||
|
||||
attempt_num=$(("$attempt_num" + 1))
|
||||
sleep 5
|
||||
done
|
||||
echo "Connected to MariaDB"
|
||||
}
|
||||
|
||||
wait_for_redis() {
|
||||
# We use a Python script to send the Redis ping
|
||||
# instead of installing redis-tools just for 1 thing
|
||||
if ! python3 /sbin/wait-for-redis.py; then
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
migrations() {
|
||||
(
|
||||
# flock is in place to prevent multiple containers from doing migrations
|
||||
# simultaneously. This also ensures that the db is ready when the command
|
||||
# of the current container starts.
|
||||
flock 200
|
||||
echo "Apply database migrations..."
|
||||
python3 manage.py migrate --skip-checks --no-input
|
||||
) 200>"${DATA_DIR}/migration_lock"
|
||||
}
|
||||
|
||||
django_checks() {
|
||||
# Explicitly run the Django system checks
|
||||
echo "Running Django checks"
|
||||
python3 manage.py check
|
||||
}
|
||||
|
||||
search_index() {
|
||||
|
||||
local -r index_version=9
|
||||
local -r index_version_file=${DATA_DIR}/.index_version
|
||||
|
||||
if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
|
||||
echo "Search index out of date. Updating..."
|
||||
python3 manage.py document_index reindex --no-progress-bar
|
||||
echo ${index_version} | tee "${index_version_file}" >/dev/null
|
||||
fi
|
||||
}
|
||||
|
||||
superuser() {
|
||||
if [[ -n "${PAPERLESS_ADMIN_USER}" ]]; then
|
||||
python3 manage.py manage_superuser
|
||||
fi
|
||||
}
|
||||
|
||||
do_work() {
|
||||
if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
|
||||
wait_for_mariadb
|
||||
elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
|
||||
wait_for_postgres
|
||||
fi
|
||||
|
||||
wait_for_redis
|
||||
|
||||
migrations
|
||||
|
||||
django_checks
|
||||
|
||||
search_index
|
||||
|
||||
superuser
|
||||
|
||||
}
|
||||
|
||||
do_work
|
||||
@@ -1,42 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Scans the environment variables for those with the suffix _FILE
|
||||
# When located, checks the file exists, and exports the contents
|
||||
# of the file as the same name, minus the suffix
|
||||
# This allows the use of Docker secrets or mounted files
|
||||
# to fill in any of the settings configurable via environment
|
||||
# variables
|
||||
|
||||
set -eu
|
||||
|
||||
for line in $(printenv)
|
||||
do
|
||||
# Extract the name of the environment variable
|
||||
env_name=${line%%=*}
|
||||
# Check if it starts with "PAPERLESS_" and ends in "_FILE"
|
||||
if [[ ${env_name} == PAPERLESS_*_FILE ]]; then
|
||||
# This should have been named differently..
|
||||
if [[ ${env_name} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${env_name} == "PAPERLESS_MODEL_FILE" ]]; then
|
||||
continue
|
||||
fi
|
||||
# Extract the value of the environment
|
||||
env_value=${line#*=}
|
||||
|
||||
# Check the file exists
|
||||
if [[ -f ${env_value} ]]; then
|
||||
|
||||
# Trim off the _FILE suffix
|
||||
non_file_env_name=${env_name%"_FILE"}
|
||||
echo "Setting ${non_file_env_name} from file"
|
||||
|
||||
# Reads the value from the file
|
||||
val="$(< "${!env_name}")"
|
||||
|
||||
# Sets the normal name to the read file contents
|
||||
export "${non_file_env_name}"="${val}"
|
||||
|
||||
else
|
||||
echo "File ${env_value} referenced by ${env_name} doesn't exist"
|
||||
fi
|
||||
fi
|
||||
done
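The _FILE convention implemented by this (now removed, s6-replaced) script pairs naturally with Docker secrets or mounted files. A hypothetical example of the wiring, illustrating the variable naming only, since a real deployment still needs Redis and a database alongside:

echo 'supersecret' > ./db_pass.txt
docker run --rm \
  --env PAPERLESS_DBPASS_FILE=/run/secrets/db_pass \
  --volume ./db_pass.txt:/run/secrets/db_pass:ro \
  ghcr.io/paperless-ngx/paperless-ngx:latest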
|
||||
@@ -1,12 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
echo "Checking if we should start flower..."
|
||||
|
||||
if [[ -n "${PAPERLESS_ENABLE_FLOWER}" ]]; then
|
||||
# Small delay to allow celery to be up first
|
||||
echo "Starting flower in 5s"
|
||||
sleep 5
|
||||
celery --app paperless flower --conf=/usr/src/paperless/src/paperless/flowerconfig.py
|
||||
else
|
||||
echo "Not starting flower"
|
||||
fi
|
||||
BIN
docker/init-flow.drawio.png
Normal file (binary image, 30 KiB, not shown)
@@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Run this script to generate the management commands again (for example if a new command is created or the template is updated)
|
||||
|
||||
set -eu
|
||||
|
||||
for command in decrypt_documents \
|
||||
@@ -19,6 +21,6 @@ for command in decrypt_documents \
|
||||
prune_audit_logs;
|
||||
do
|
||||
echo "installing $command..."
|
||||
sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
|
||||
chmod +x /usr/local/bin/$command
|
||||
sed "s/management_command/$command/g" management_script.sh >"$PWD/rootfs/usr/local/bin/$command"
|
||||
chmod +x "$PWD/rootfs/usr/local/bin/$command"
|
||||
done
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/command/with-contenv /usr/bin/bash
|
||||
# shellcheck shell=bash
|
||||
|
||||
set -e
|
||||
|
||||
cd /usr/src/paperless/src/
|
||||
# This ensures environment is setup
|
||||
# shellcheck disable=SC1091
|
||||
source /sbin/env-from-file.sh
|
||||
cd "${PAPERLESS_SRC_DIR}"
|
||||
|
||||
if [[ $(id -u) == 0 ]] ;
|
||||
then
|
||||
gosu paperless python3 manage.py management_command "$@"
|
||||
elif [[ $(id -un) == "paperless" ]] ;
|
||||
then
|
||||
if [[ $(id -u) == 0 ]]; then
|
||||
s6-setuidgid paperless python3 manage.py management_command "$@"
|
||||
elif [[ $(id -un) == "paperless" ]]; then
|
||||
python3 manage.py management_command "$@"
|
||||
else
|
||||
echo "Unknown user."
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
SUPERVISORD_WORKING_DIR="${PAPERLESS_SUPERVISORD_WORKING_DIR:-$PWD}"
|
||||
rootless_args=()
|
||||
if [ "$(id -u)" == "$(id -u paperless)" ]; then
|
||||
rootless_args=(
|
||||
--user
|
||||
paperless
|
||||
--logfile
|
||||
"${SUPERVISORD_WORKING_DIR}/supervisord.log"
|
||||
--pidfile
|
||||
"${SUPERVISORD_WORKING_DIR}/supervisord.pid"
|
||||
)
|
||||
fi
|
||||
|
||||
exec /usr/local/bin/supervisord -c /etc/supervisord.conf "${rootless_args[@]}"
|
||||
8
docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/run
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/command/with-contenv /usr/bin/bash
|
||||
# shellcheck shell=bash
|
||||
declare -r log_prefix="[init-complete]"
|
||||
declare -r end_time=$(date +%s)
|
||||
declare -r start_time=${PAPERLESS_START_TIME_S}
|
||||
|
||||
echo "${log_prefix} paperless-ngx docker container init completed in $(($end_time-$start_time)) seconds"
|
||||
echo "${log_prefix} Starting services"
|
||||
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/type
Normal file
@@ -0,0 +1 @@
|
||||
oneshot
|
||||
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/up
Normal file
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/up
Normal file
@@ -0,0 +1 @@
|
||||
/etc/s6-overlay/s6-rc.d/init-complete/run
|
||||
44
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/run
Executable file
@@ -0,0 +1,44 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[custom-init]"

# Mostly borrowed from the LinuxServer.io base image
# https://github.com/linuxserver/docker-baseimage-ubuntu/tree/bionic/root/etc/cont-init.d
declare -r custom_script_dir="/custom-cont-init.d"

# Tamper checking.
# Don't run files which are owned by anyone except root
# Don't run files which are writeable by others
if [ -d "${custom_script_dir}" ]; then
    if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 ! -user root)" ]; then
        echo "${log_prefix} **** Potential tampering with custom scripts detected ****"
        echo "${log_prefix} **** The folder '${custom_script_dir}' must be owned by root ****"
        exit 0
    fi
    if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 -perm -o+w)" ]; then
        echo "${log_prefix} **** The folder '${custom_script_dir}' or some of its contents have write permissions for others, which is a security risk. ****"
        echo "${log_prefix} **** Please review the permissions and their contents to make sure they are owned by root, and can only be modified by root. ****"
        exit 0
    fi

    # Make sure custom init directory has files in it
    if [ -n "$(/bin/ls --almost-all "${custom_script_dir}" 2>/dev/null)" ]; then
        echo "${log_prefix} files found in ${custom_script_dir}, executing"
        # Loop over files in the directory
        for SCRIPT in "${custom_script_dir}"/*; do
            NAME="$(basename "${SCRIPT}")"
            if [ -f "${SCRIPT}" ]; then
                echo "${log_prefix} ${NAME}: executing..."
                /command/with-contenv /bin/bash "${SCRIPT}"
                echo "${log_prefix} ${NAME}: exited $?"
            elif [ ! -f "${SCRIPT}" ]; then
                echo "${log_prefix} ${NAME}: is not a file"
            fi
        done
    else
        echo "${log_prefix} no custom files found, exiting..."
    fi
else
    echo "${log_prefix} ${custom_script_dir} doesn't exist, nothing to do"
fi
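
As an illustrative usage sketch of the service above (the image name and host path are placeholders, not taken from this diff), root-owned scripts mounted at /custom-cont-init.d are executed once during container startup:

docker run -d \
  -v ./custom-init:/custom-cont-init.d:ro \
  ghcr.io/paperless-ngx/paperless-ngx
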
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-custom-init/run
30
docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/run
Executable file
@@ -0,0 +1,30 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[env-init]"

echo "${log_prefix} Checking for environment from files"

if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; then
    for FILENAME in /run/s6/container_environment/*; do
        if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
            # This should have been named differently...
            if [[ ${FILENAME##*/} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${FILENAME##*/} == "PAPERLESS_MODEL_FILE" ]]; then
                continue
            fi
            SECRETFILE=$(cat "${FILENAME}")
            # Check the file exists
            if [[ -f ${SECRETFILE} ]]; then
                # Trim off trailing _FILE
                FILESTRIP=${FILENAME//_FILE/}
                # Set environment variable
                cat "${SECRETFILE}" > "${FILESTRIP}"
                echo "${log_prefix} ${FILESTRIP##*/} set from ${FILENAME##*/}"
            else
                echo "${log_prefix} cannot find secret in ${FILENAME##*/}"
            fi
        fi
    done
else
    echo "${log_prefix} No *_FILE environment found"
fi
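
Illustrative usage of the *_FILE handling above (variable name, paths and image are assumptions, not taken from this diff): pointing PAPERLESS_DBPASS_FILE at a mounted secret makes the service write a PAPERLESS_DBPASS entry into the s6 container environment before other services start.

docker run -d \
  -e PAPERLESS_DBPASS_FILE=/run/secrets/db_pass \
  -v ./db_pass.txt:/run/secrets/db_pass:ro \
  ghcr.io/paperless-ngx/paperless-ngx
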
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-env-file/run
33
docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/run
Executable file
@@ -0,0 +1,33 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-folders]"

declare -r export_dir="/usr/src/paperless/export"
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r media_root_dir="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
declare -r consume_dir="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
declare -r tmp_dir="${PAPERLESS_SCRATCH_DIR:=/tmp/paperless}"

echo "${log_prefix} Checking for folder existence"

for dir in \
    "${export_dir}" \
    "${data_dir}" "${data_dir}/index" \
    "${media_root_dir}" "${media_root_dir}/documents" "${media_root_dir}/documents/originals" "${media_root_dir}/documents/thumbnails" \
    "${consume_dir}" \
    "${tmp_dir}"; do
    if [[ ! -d "${dir}" ]]; then
        mkdir --parents --verbose "${dir}"
    fi
done

echo "${log_prefix} Adjusting file and folder permissions"
for dir in \
    "${export_dir}" \
    "${data_dir}" \
    "${media_root_dir}" \
    "${consume_dir}" \
    "${tmp_dir}"; do
    find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown --changes paperless:paperless {} +
done
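
The directories checked above can be relocated through the environment variables the script reads; for example (illustrative values and image name only, not from this diff):

docker run -d \
  -e PAPERLESS_DATA_DIR=/mnt/paperless/data \
  -e PAPERLESS_MEDIA_ROOT=/mnt/paperless/media \
  -e PAPERLESS_CONSUMPTION_DIR=/mnt/paperless/consume \
  ghcr.io/paperless-ngx/paperless-ngx
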
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-folders/run
20
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/run
Executable file
@@ -0,0 +1,20 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-migrations]"
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"

(
    # flock is in place to prevent multiple containers from doing migrations
    # simultaneously. This also ensures that the db is ready when the command
    # of the current container starts.
    flock 200
    echo "${log_prefix} Apply database migrations..."
    cd "${PAPERLESS_SRC_DIR}"

    if [[ -n "${USER_IS_NON_ROOT}" ]]; then
        exec python3 manage.py migrate --skip-checks --no-input
    else
        exec s6-setuidgid paperless python3 manage.py migrate --skip-checks --no-input
    fi

) 200>"${data_dir}/migration_lock"
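
A minimal sketch of the same flock pattern used above, applicable to any one-time setup step that several containers sharing a volume might race on (the lock path is a placeholder):

(
    # Only one process holding fd 200 proceeds at a time; the others block here.
    flock 200
    echo "doing one-time setup"
) 200>/path/to/shared/setup.lock
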
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-migrations/run
22
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/run
Executable file
@@ -0,0 +1,22 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-user]"

declare -r usermap_original_uid=$(id -u paperless)
declare -r usermap_original_gid=$(id -g paperless)
declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
declare -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}

if [[ ${usermap_new_uid} != "${usermap_original_uid}" ]]; then
    echo "${log_prefix} Mapping UID for paperless to $usermap_new_uid"
    usermod --non-unique --uid "${usermap_new_uid}" paperless
else
    echo "${log_prefix} No UID changes for paperless"
fi

if [[ ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
    echo "${log_prefix} Mapping GID for paperless to $usermap_new_gid"
    groupmod --non-unique --gid "${usermap_new_gid}" paperless
else
    echo "${log_prefix} No GID changes for paperless"
fi
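
Illustrative usage of the UID/GID mapping above (the values and image name are examples, not from this diff): matching the in-container paperless user to the host user that owns the bind-mounted data.

docker run -d \
  -e USERMAP_UID=1000 \
  -e USERMAP_GID=1000 \
  ghcr.io/paperless-ngx/paperless-ngx
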
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-modify-user/run
28
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/run
Executable file
@@ -0,0 +1,28 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-index]"

declare -r index_version=9
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r index_version_file="${data_dir}/.index_version"

update_index () {
    echo "${log_prefix} Search index out of date. Updating..."
    cd "${PAPERLESS_SRC_DIR}"
    if [[ -n "${USER_IS_NON_ROOT}" ]]; then
        python3 manage.py document_index reindex --no-progress-bar
        echo ${index_version} | tee "${index_version_file}" > /dev/null
    else
        s6-setuidgid paperless python3 manage.py document_index reindex --no-progress-bar
        echo ${index_version} | s6-setuidgid paperless tee "${index_version_file}" > /dev/null
    fi
}

if [[ (! -f "${index_version_file}") ]]; then
    echo "${log_prefix} No index version file found"
    update_index
elif [[ $(<"${index_version_file}") != "$index_version" ]]; then
    echo "${log_prefix} index version updated"
    update_index
fi
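
A minimal sketch of the version-stamp pattern the script above uses, with placeholder names (illustrative only): rebuild whenever the stored version differs from the current one.

current_version=3
stamp_file=/tmp/.example_version
if [[ ! -f "${stamp_file}" || $(<"${stamp_file}") != "${current_version}" ]]; then
    echo "rebuilding..."
    echo "${current_version}" > "${stamp_file}"
fi
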
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-search-index/run
19
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/run
Executable file
@@ -0,0 +1,19 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-start]"

echo "${log_prefix} paperless-ngx docker container starting..."

# Set some directories into environment for other steps to access via environment
# Sort of like variables for later
printf "/usr/src/paperless/src" > /var/run/s6/container_environment/PAPERLESS_SRC_DIR
echo $(date +%s) > /var/run/s6/container_environment/PAPERLESS_START_TIME_S

# Check if we're starting as a non-root user
if [ $(id -u) == $(id -u paperless) ]; then
    printf "true" > /var/run/s6/container_environment/USER_IS_NON_ROOT
    echo "${log_prefix} paperless-ngx docker container running under a user"
else
    echo "${log_prefix} paperless-ngx docker container starting init as root"
fi
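
Illustrative sketch of the mechanism relied on above (a general assumption about s6-overlay, not something stated in this diff): writing a file under /var/run/s6/container_environment makes its contents available as an environment variable to later s6 services started with with-contenv.

# A later oneshot or longrun would then see MY_FLAG=true in its environment.
printf "true" > /var/run/s6/container_environment/MY_FLAG
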
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-start/run
20
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/run
Executable file
@@ -0,0 +1,20 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-superuser]"

if [[ -n "${PAPERLESS_ADMIN_USER}" ]]; then
    echo "${log_prefix} Creating superuser..."
    cd "${PAPERLESS_SRC_DIR}"

    if [[ -n "${USER_IS_NON_ROOT}" ]]; then
        python3 manage.py manage_superuser
    else
        s6-setuidgid paperless python3 manage.py manage_superuser
    fi

    echo "${log_prefix} Superuser creation done"

else
    echo "${log_prefix} Not creating superuser"
fi
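
Illustrative usage for the superuser step above: PAPERLESS_ADMIN_USER appears in the script itself, while PAPERLESS_ADMIN_PASSWORD is an assumption about the companion setting, and the values and image name are placeholders.

docker run -d \
  -e PAPERLESS_ADMIN_USER=admin \
  -e PAPERLESS_ADMIN_PASSWORD=change-me \
  ghcr.io/paperless-ngx/paperless-ngx
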
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/type
Normal file
@@ -0,0 +1 @@
oneshot
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/up
Normal file
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-superuser/run
15
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/run
Executable file
@@ -0,0 +1,15 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-checks]"

# Explicitly run the Django system checks
echo "${log_prefix} Running Django checks"

cd "${PAPERLESS_SRC_DIR}"

if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    python3 manage.py check
else
    s6-setuidgid paperless python3 manage.py check
fi
1
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/type
Normal file
@@ -0,0 +1 @@
oneshot
Some files were not shown because too many files have changed in this diff.