Compare commits

...

78 Commits

Author SHA1 Message Date
Alexander ADAM
c2d7f3ebc6
feat: add option to disable OCR (#768)
fixes #344
refs #767
2024-09-07 22:27:48 +02:00
Kaiyao Ke
8f1ff56d34
fixed remaining non-idempotent tests (#758) 2024-06-03 10:28:10 +02:00
Kaiyao Ke
11ae0ea7d3
fixed non-idempotent tests (#757) 2024-05-17 15:37:43 +02:00
bgamard
afa78857f9 fix test 2024-04-03 22:03:52 +02:00
bgamard
ae2423b2e9 remove check_username route 2024-04-03 21:53:00 +02:00
bgamard
01d3e746d8 cleanup Dockerfile + upgrade to jetty 11.0.20 2024-03-01 22:06:50 +01:00
Sukalpo Mitra
13cd03a762
Self contained Teedy Dockerfile (#745) 2024-03-01 21:46:17 +01:00
bgamard
ac7b3c4eb9 Merge remote-tracking branch 'origin/master' 2024-02-19 18:34:02 +01:00
bgamard
7effbc8de0 list display mode in the share app 2024-02-19 18:33:56 +01:00
Sukalpo Mitra
8c5f0c78e7
Added support for JWT based authentication (#739) 2023-12-02 17:57:51 +01:00
Erich Mauerböck
45e00ac93d
add explicit binding (#735)
* add explicit binding

* fixup building on windows

* reactivate unit test

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:58:11 +01:00
Erich Mauerböck
80454afc0d
fix unit test (#736)
Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:57:33 +01:00
Erich Mauerböck
428e898a7a
allow hyphen in username (#731)
* allow hyphen in username

* remove extra escaping

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-03 15:36:02 +01:00
Julien Kirch
13762eb67f
Upgrade pdfbox version to 2.0.29 (#728) 2023-10-20 15:41:45 +02:00
Julien Kirch
04c43ebf7b
Specify document search parameter as HTTP params (#722) 2023-10-19 18:34:04 +02:00
Julien Kirch
f9b5a5212d
Allow to specify a pool size (#727) 2023-10-09 14:05:13 +02:00
Julien Kirch
0351f94761
Upgrade Hibernate version (#726) 2023-10-09 12:36:53 +02:00
Julien Kirch
a89543b555
Make search for documents faster for large dataset (#698) 2023-10-08 22:07:01 +02:00
Benjamin Gamard
ce30b1a6ff
fix build 2023-09-15 22:05:04 +02:00
Orland Karamani
1b382004cb
Albanian Language Support (#719)
Co-authored-by: Orlando Karamani <orlandothemover@gmail.com>
2023-09-14 16:51:11 +02:00
Julien Kirch
ab7ff25929
Store file size in DB (#704) 2023-09-14 16:50:39 +02:00
Julien Kirch
eedf19ad9d
Fix no favicon on shares #580 (#718) 2023-09-08 15:43:35 +02:00
Julien Kirch
941ace99c6
Fix typo in /file/:id/versions description (#717) 2023-09-07 16:46:43 +02:00
bgamard
95e0b870f6 Merge remote-tracking branch 'origin/master' 2023-06-29 21:33:12 +02:00
bgamard
2bdb2dc34f #678: reopen ldap connection for each login 2023-06-29 21:33:05 +02:00
Julien Kirch
22a44d0c8d
Finding several documents by their title in a single query (#696) 2023-06-06 21:31:01 +02:00
Julien Kirch
a9cdbdc03e
Add missing french translations (#694) 2023-06-05 16:02:55 +02:00
Julien Kirch
3fd5470eae
Add mention in the API doc that document endpoint returns the document's metadata (#695) 2023-06-04 21:49:36 +02:00
39f96cbd28
Update config.properties (#693)
fix db version to reflect the most recent
2023-06-04 21:48:55 +02:00
4501f10429
fix comma to make valid language de.json file again 2023-05-07 11:56:34 +02:00
bd0cde7e87
Add support for STARTTLS for Inbox Scanning (#682) 2023-04-25 18:27:46 +02:00
bgamard
dd36e08d7d #680: warning when using H2 database 2023-04-22 00:47:01 +02:00
Jose Luis Montes Jiménez
4634def93e
updating README.md (#681)
H2 database should only be use for testing, setting the docker-compose with postgreSQL as default way.
2023-04-22 00:12:48 +02:00
bgamard
1974a8bb8d #668: cleanup hibernate dependencies 2023-04-12 17:58:51 +02:00
bgamard
e9a6609593 #668: jetty 11 deployment 2023-04-12 13:35:54 +02:00
bgamard
b20577026e Closes #668: upgrade jetty/servlet-api/jersey 2023-04-09 21:31:53 +02:00
bgamard
dae9e137f7 Merge remote-tracking branch 'origin/master' 2023-03-22 10:23:18 +01:00
bgamard
1509d0c5bb revert h2 upgrade 2023-03-22 10:23:11 +01:00
430ebbd1c5
support ldaps (#670) 2023-03-21 21:56:14 +01:00
bgamard
b561eaee6d portuguese translation 2023-03-20 20:20:52 +01:00
bgamard
1aa21c3762 bump dependencies 2023-03-19 14:28:22 +01:00
bgamard
c8a67177d8 next dev iteration 2023-03-12 14:11:52 +01:00
bgamard
59597e962d 1.11 2023-03-12 13:58:03 +01:00
bgamard
c85a951a9e upgrade base image 2023-03-12 13:52:30 +01:00
bgamard
7f47a17633 upgrade jetty 2023-03-12 13:45:36 +01:00
bgamard
690c961a55 Merge remote-tracking branch 'origin/master' 2023-03-12 13:35:51 +01:00
bgamard
21efd1e4a7 Closes #658 2023-03-12 13:35:35 +01:00
@RandyMcMillan
ad27228429
docker-compose.yml: add example config (#665) 2023-02-20 11:51:39 +01:00
@RandyMcMillan
dd4a1667ca
.gitignore: add docs/.gitkeep (#664) 2023-02-20 11:51:30 +01:00
@RandyMcMillan
399d2b7951
minor grammar corrections (#663) 2023-02-19 21:31:30 +01:00
bgamard
d51dfd6636 #647: fix doc 2022-08-26 18:18:06 +02:00
bgamard
ca85c1fa9f #647: always return OK on password lost route 2022-08-26 18:15:49 +02:00
bgamard
5e7f06070e keep filename in temporary file 2022-05-16 19:22:54 +02:00
bgamard
dc0c20cd0c moved tests 2022-05-16 18:53:08 +02:00
bgamard
98aa33341a moved tests 2022-05-16 18:50:19 +02:00
bgamard
1f7c0afc1e Closes #639: rework mime type resolution using java api 2022-05-16 18:44:26 +02:00
bgamard
1ccce3f942 rename 2022-05-05 18:15:24 +02:00
Uli
90d5bc8de7
Allow the . (dot) and @ (at) character in usernames (#637)
Co-authored-by: Uli Koeth <uli@kiot.eu>
2022-05-05 17:48:45 +02:00
bgamard
c6a685d7c0 Closes #620: delete a non-existing document should return 404 2022-04-17 13:35:29 +02:00
bgamard
e6cfd899e5 Closes #632: validate POST /app/config_inbox and update documentation 2022-04-17 13:23:22 +02:00
Julien Kirch
bd23f14792
Add doc for search syntax (#634) 2022-04-17 13:10:01 +02:00
Julien Kirch
46f6b9e537
Download zip of files not in same document (#591) 2022-04-15 10:18:39 +02:00
Julien Kirch
d5832c48e1
Small code cleaning 2022-03-21 11:36:25 +01:00
Julien Kirch
64ec0f63ca
Add parameter to return the files when searching for a document (#582) 2022-03-20 11:36:28 +01:00
Ben Grabham
0b7c42e814
Check if environment variables are not empty strings as well as not null (#623) 2022-02-20 15:48:37 +01:00
bgamard
d8dc63fc98 Merge remote-tracking branch 'origin/master' 2022-02-02 21:18:06 +01:00
bgamard
81a7f154c2 logs only for admin 2022-02-02 21:17:58 +01:00
StaryVena
af3263d471
Add OCR support for Czech language (#613)
Co-authored-by: Vaclav Uher <vaclav.uher@bruker.com>
2022-01-26 15:27:14 +01:00
Dan Schaper
bbe5f19997
Tag latest on master, tag version on github tag. (#612)
Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-25 10:37:47 +01:00
Benjamin Gamard
f33650c099
fix action 2022-01-21 13:51:16 +01:00
Benjamin Gamard
58f81ec851
fix action 2022-01-21 13:37:31 +01:00
Dan Schaper
c9262eb204
Add build tags and labels (#608)
Fixes Docker images always build as 'latest' #607

Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-21 13:35:39 +01:00
bgamard
3637b832e5 test the new mime type detection 2022-01-17 14:37:22 +01:00
Joost Timmerman
ee56cfe2b4
Support audio mime (#574) 2022-01-17 14:24:50 +01:00
bgamard
721410c7d0 add test dependencies 2022-01-13 00:15:37 +01:00
bgamard
f0310e3933 add test dependencies 2022-01-13 00:06:29 +01:00
bgamard
302d7cccc4 run tests + fix docker username 2022-01-12 23:59:43 +01:00
Dan Schaper
f9977d5ce6
Actions workflow (#601)
Signed-off-by: Dan Schaper <dan@glacialmagma.com>
2022-01-12 23:49:34 +01:00
208 changed files with 5652 additions and 2096 deletions

.github/workflows/build-deploy.yml (new file, 84 lines)

@@ -0,0 +1,84 @@
name: Maven CI/CD

on:
  push:
    branches: [master]
    tags: [v*]
  workflow_dispatch:

jobs:
  build_and_publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 11
        uses: actions/setup-java@v2
        with:
          java-version: "11"
          distribution: "temurin"
          cache: maven
      - name: Install test dependencies
        run: sudo apt-get update && sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
      - name: Build with Maven
        run: mvn --batch-mode -Pprod clean install
      - name: Upload war artifact
        uses: actions/upload-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target/docs*.war

  build_docker_image:
    name: Publish to Docker Hub
    runs-on: ubuntu-latest
    needs: [build_and_publish]
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
      -
        name: Download war artifact
        uses: actions/download-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target
      -
        name: Setup up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Populate Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: sismics/docs
          flavor: |
            latest=false
          tags: |
            type=ref,event=tag
            type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
          labels: |
            org.opencontainers.image.title = Teedy
            org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
            org.opencontainers.image.created = ${{ github.event_created_at }}
            org.opencontainers.image.author = Sismics
            org.opencontainers.image.url = https://teedy.io/
            org.opencontainers.image.vendor = Sismics
            org.opencontainers.image.license = GPLv2
            org.opencontainers.image.version = ${{ github.event_head_commit.id }}
      -
        name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}

.gitignore (7 lines changed)

@@ -13,4 +13,9 @@ node_modules
 import_test
 teedy-importer-linux
 teedy-importer-macos
 teedy-importer-win.exe
+docs/*
+!docs/.gitkeep
+
+#macos
+.DS_Store

Dockerfile

@@ -1,12 +1,26 @@
-FROM sismics/ubuntu-jetty:9.4.36
+FROM ubuntu:22.04
 LABEL maintainer="b.gamard@sismics.com"
 
+# Run Debian in non interactive mode
+ENV DEBIAN_FRONTEND noninteractive
+
+# Configure env
+ENV LANG C.UTF-8
+ENV LC_ALL C.UTF-8
+ENV JAVA_HOME /usr/lib/jvm/java-11-openjdk-amd64/
+ENV JAVA_OPTIONS -Dfile.encoding=UTF-8 -Xmx1g
+ENV JETTY_VERSION 11.0.20
+ENV JETTY_HOME /opt/jetty
+
+# Install packages
 RUN apt-get update && \
     apt-get -y -q --no-install-recommends install \
+    vim less procps unzip wget tzdata openjdk-11-jdk \
     ffmpeg \
     mediainfo \
     tesseract-ocr \
     tesseract-ocr-ara \
+    tesseract-ocr-ces \
     tesseract-ocr-chi-sim \
     tesseract-ocr-chi-tra \
     tesseract-ocr-dan \
@@ -30,13 +44,32 @@ RUN apt-get update && \
     tesseract-ocr-tha \
     tesseract-ocr-tur \
     tesseract-ocr-ukr \
-    tesseract-ocr-vie && \
-    apt-get clean && rm -rf /var/lib/apt/lists/*
+    tesseract-ocr-vie \
+    tesseract-ocr-sqi \
+    && apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+RUN dpkg-reconfigure -f noninteractive tzdata
 
-# Remove the embedded javax.mail jar from Jetty
-RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
+# Install Jetty
+RUN wget -nv -O /tmp/jetty.tar.gz \
+    "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-home/${JETTY_VERSION}/jetty-home-${JETTY_VERSION}.tar.gz" \
+    && tar xzf /tmp/jetty.tar.gz -C /opt \
+    && mv /opt/jetty* /opt/jetty \
+    && useradd jetty -U -s /bin/false \
+    && chown -R jetty:jetty /opt/jetty \
+    && mkdir /opt/jetty/webapps \
+    && chmod +x /opt/jetty/bin/jetty.sh
 
-ADD docs.xml /opt/jetty/webapps/docs.xml
-ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
+EXPOSE 8080
 
-ENV JAVA_OPTIONS -Xmx1g
+# Install app
+RUN mkdir /app && \
+    cd /app && \
+    java -jar /opt/jetty/start.jar --add-modules=server,http,webapp,deploy
+
+ADD docs.xml /app/webapps/docs.xml
+ADD docs-web/target/docs-web-*.war /app/webapps/docs.war
+
+WORKDIR /app
+CMD ["java", "-jar", "/opt/jetty/start.jar"]

README.md (105 lines changed)

@@ -3,6 +3,7 @@
 </h3>
 
 [![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
+[![Maven CI/CD](https://github.com/sismics/docs/actions/workflows/build-deploy.yml/badge.svg)](https://github.com/sismics/docs/actions/workflows/build-deploy.yml)
 
 Teedy is an open source, lightweight document management system for individuals and businesses.
@@ -14,8 +15,7 @@ Teedy is an open source, lightweight document management system for individuals
 ![New!](https://teedy.io/img/laptop-demo.png?20180301)
 
-Demo
-----
+# Demo
 
 A demo is available at [demo.teedy.io](https://demo.teedy.io)
@@ -23,8 +23,7 @@ A demo is available at [demo.teedy.io](https://demo.teedy.io)
 - "admin" login with "admin" password
 - "demo" login with "password" password
 
-Features
---------
+# Features
 
 - Responsive user interface
 - Optical character recognition
@@ -54,21 +53,20 @@ Features
 - [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
 - Tested to one million documents
 
-Install with Docker
--------------------
+# Install with Docker
 
-A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
+A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. If no PostgreSQL config is provided, the database is an embedded H2 database. The H2 embedded database should only be used for testing. For production usage use the provided PostgreSQL configuration (check the Docker Compose example)
 
 **The default admin password is "admin". Don't forget to change it before going to production.**
 
 - Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
-- Latest stable version: `sismics/docs:v1.10`
+- Latest stable version: `sismics/docs:v1.11`
 
 The data directory is `/data`. Don't forget to mount a volume on it.
 
 To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com)
 
-### Available environment variables
+## Available environment variables
 
 - General
   - `DOCS_BASE_URL`: The base url used by the application. Generated url's will be using this as base.
@@ -83,6 +81,7 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
   - `DATABASE_URL`: The jdbc connection string to be used by `hibernate`.
   - `DATABASE_USER`: The user which should be used for the database connection.
   - `DATABASE_PASSWORD`: The password to be used for the database connection.
+  - `DATABASE_POOL_SIZE`: The pool size to be used for the database connection.
 
 - Language
   - `DOCS_DEFAULT_LANGUAGE`: The language which will be used as default. Currently supported values are:
@@ -94,41 +93,19 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
   - `DOCS_SMTP_USERNAME`: The username to be used.
   - `DOCS_SMTP_PASSWORD`: The password to be used.
 
-### Examples
+## Examples
 
 In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords.
 
-#### Using the internal database
+### Default, using PostgreSQL
 
 ```yaml
 version: '3'
 services:
   # Teedy Application
   teedy-server:
-    image: sismics/docs:v1.10
-    restart: unless-stopped
-    ports:
-      # Map internal port to host
-      - 8080:8080
-    environment:
-      # Base url to be used
-      DOCS_BASE_URL: "https://docs.example.com"
-      # Set the admin email
-      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
-      # Set the admin password (in this example: "superSecure")
-      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
-    volumes:
-      - ./docs/data:/data
-```
-
-#### Using PostgreSQL
-
-```yaml
-version: '3'
-services:
-  # Teedy Application
-  teedy-server:
-    image: sismics/docs:v1.10
+    image: sismics/docs:v1.11
     restart: unless-stopped
     ports:
       # Map internal port to host
@@ -146,6 +123,7 @@ services:
       DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
      DATABASE_USER: "teedy_db_user"
      DATABASE_PASSWORD: "teedy_db_password"
+      DATABASE_POOL_SIZE: "10"
     volumes:
       - ./docs/data:/data
     networks:
@@ -179,10 +157,32 @@ networks:
     driver: bridge
 ```
 
-Manual installation
--------------------
+### Using the internal database (only for testing)
 
-#### Requirements
+```yaml
+version: '3'
+services:
+  # Teedy Application
+  teedy-server:
+    image: sismics/docs:v1.11
+    restart: unless-stopped
+    ports:
+      # Map internal port to host
+      - 8080:8080
+    environment:
+      # Base url to be used
+      DOCS_BASE_URL: "https://docs.example.com"
+      # Set the admin email
+      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
+      # Set the admin password (in this example: "superSecure")
+      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
+    volumes:
+      - ./docs/data:/data
+```
+
+# Manual installation
+
+## Requirements
 
 - Java 11
 - Tesseract 4 for OCR
@@ -190,13 +190,12 @@ Manual installation
 - mediainfo for video metadata extraction
 - A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
 
-#### Download
+## Download
 
 The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format.
 
 **The default admin password is "admin". Don't forget to change it before going to production.**
 
-How to build Teedy from the sources
-----------------------------------
+## How to build Teedy from the sources
 
 Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
@@ -209,35 +208,39 @@ Teedy is organized in several Maven modules:
 First off, clone the repository: `git clone git://github.com/sismics/docs.git`
 or download the sources from GitHub.
 
-#### Launch the build
+### Launch the build
 
 From the root directory:
 
-    mvn clean -DskipTests install
+```console
+mvn clean -DskipTests install
+```
 
-#### Run a stand-alone version
+### Run a stand-alone version
 
 From the `docs-web` directory:
 
-    mvn jetty:run
+```console
+mvn jetty:run
+```
 
-#### Build a .war to deploy to your servlet container
+### Build a .war to deploy to your servlet container
 
 From the `docs-web` directory:
 
-    mvn -Pprod -DskipTests clean install
+```console
+mvn -Pprod -DskipTests clean install
+```
 
 You will get your deployable WAR in the `docs-web/target` directory.
 
-Contributing
-------------
+# Contributing
 
 All contributions are more than welcomed. Contributions may close an issue, fix a bug (reported or not reported), improve the existing code, add new feature, and so on.
 
 The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there.
 
-License
--------
+# License
 
 Teedy is released under the terms of the GPL license. See `COPYING` for more
 information or see <http://opensource.org/licenses/GPL-2.0>.

docker-compose.yml (new file, 18 lines)

@@ -0,0 +1,18 @@
version: '3'
services:
  # Teedy Application
  teedy-server:
    image: sismics/docs:v1.10
    restart: unless-stopped
    ports:
      # Map internal port to host
      - 8080:8080
    environment:
      # Base url to be used
      DOCS_BASE_URL: "https://docs.example.com"
      # Set the admin email
      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
      # Set the admin password (in this example: "superSecure")
      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
    volumes:
      - ./docs/data:/data

docs-core/pom.xml

@@ -5,10 +5,10 @@
     <parent>
         <groupId>com.sismics.docs</groupId>
         <artifactId>docs-parent</artifactId>
-        <version>1.10</version>
-        <relativePath>..</relativePath>
+        <version>1.12-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
     <artifactId>docs-core</artifactId>
     <packaging>jar</packaging>
@@ -17,20 +17,10 @@
     <dependencies>
         <!-- Persistence layer dependencies -->
         <dependency>
-            <groupId>org.hibernate</groupId>
+            <groupId>org.hibernate.orm</groupId>
             <artifactId>hibernate-core</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-entitymanager</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-c3p0</artifactId>
-        </dependency>
 
         <!-- Other external dependencies -->
         <dependency>
             <groupId>joda-time</groupId>
@@ -41,30 +31,30 @@
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-compress</artifactId>
         </dependency>
 
         <dependency>
-            <groupId>commons-lang</groupId>
-            <artifactId>commons-lang</artifactId>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-email</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.freemarker</groupId>
             <artifactId>freemarker</artifactId>
         </dependency>
 
         <dependency>
-            <groupId>org.glassfish</groupId>
-            <artifactId>javax.json</artifactId>
+            <groupId>jakarta.json</groupId>
+            <artifactId>jakarta.json-api</artifactId>
         </dependency>
 
         <dependency>
@@ -76,17 +66,17 @@
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>jcl-over-slf4j</artifactId>
@@ -96,17 +86,17 @@
             <groupId>at.favre.lib</groupId>
             <artifactId>bcrypt</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-core</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-analyzers-common</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-queryparser</artifactId>
@@ -122,11 +112,6 @@
             <artifactId>lucene-highlighter</artifactId>
         </dependency>
 
-        <dependency>
-            <groupId>com.sun.mail</groupId>
-            <artifactId>javax.mail</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>com.squareup.okhttp3</groupId>
             <artifactId>okhttp</artifactId>
@@ -134,7 +119,12 @@
         <dependency>
             <groupId>org.apache.directory.api</groupId>
-            <artifactId>api-all</artifactId>
+            <artifactId>api-ldap-client-api</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.directory.api</groupId>
+            <artifactId>api-ldap-codec-standalone</artifactId>
         </dependency>
 
         <!-- Only there to read old index and rebuild them -->
@@ -142,22 +132,22 @@
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-backward-codecs</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.imgscalr</groupId>
             <artifactId>imgscalr-lib</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.apache.pdfbox</groupId>
             <artifactId>pdfbox</artifactId>
         </dependency>
 
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcprov-jdk15on</artifactId>
         </dependency>
 
         <dependency>
             <groupId>fr.opensagres.xdocreport</groupId>
             <artifactId>fr.opensagres.odfdom.converter.pdf</artifactId>
@@ -195,39 +185,20 @@
             <artifactId>postgresql</artifactId>
         </dependency>
 
-        <!-- JDK 11 JAXB dependencies -->
-        <dependency>
-            <groupId>javax.xml.bind</groupId>
-            <artifactId>jaxb-api</artifactId>
-            <version>2.3.0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.sun.xml.bind</groupId>
-            <artifactId>jaxb-core</artifactId>
-            <version>2.3.0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.sun.xml.bind</groupId>
-            <artifactId>jaxb-impl</artifactId>
-            <version>2.3.0</version>
-        </dependency>
-
         <!-- Test dependencies -->
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>
         </dependency>
 
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
             <scope>test</scope>
         </dependency>
     </dependencies>
 
     <profiles>
         <!-- Development profile (active by default) -->
         <profile>
@@ -239,7 +210,7 @@
                     <value>dev</value>
                 </property>
             </activation>
 
             <build>
                 <resources>
                     <resource>
@@ -255,7 +226,7 @@
             <id>prod</id>
         </profile>
     </profiles>
 
     <build>
         <resources>
             <resource>

ConfigType.java

@@ -1,9 +1,9 @@
 package com.sismics.docs.core.constant;
 
 /**
  * Configuration parameters.
  *
  * @author jtremeaux
  */
 public enum ConfigType {
     /**
@@ -20,6 +20,11 @@ public enum ConfigType {
      */
     GUEST_LOGIN,
 
+    /**
+     * OCR enabled.
+     */
+    OCR_ENABLED,
+
     /**
      * Default language.
      */
@@ -40,6 +45,7 @@
     INBOX_ENABLED,
     INBOX_HOSTNAME,
     INBOX_PORT,
+    INBOX_STARTTLS,
     INBOX_USERNAME,
     INBOX_PASSWORD,
     INBOX_FOLDER,
@@ -53,6 +59,7 @@
     LDAP_ENABLED,
     LDAP_HOST,
     LDAP_PORT,
+    LDAP_USESSL,
     LDAP_ADMIN_DN,
     LDAP_ADMIN_PASSWORD,
     LDAP_BASE_DN,

Constants.java

@@ -43,7 +43,7 @@ public class Constants {
     /**
      * Supported document languages.
      */
-    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie");
+    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces", "sqi");
 
     /**
      * Base URL environment variable.

@@ -10,8 +10,8 @@ import com.sismics.docs.core.util.AuditLogUtil;
 import com.sismics.docs.core.util.SecurityUtil;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
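
The javax-to-jakarta import swap above repeats in every DAO below; it is the source-level counterpart of the Jetty 11 / Jakarta EE upgrade tracked in #668. A minimal sketch of the pattern with a hypothetical DAO and entity (names are illustrative, not from the repository):

```java
// Before (Java EE namespace, Jetty 9):
// import javax.persistence.EntityManager;
// import javax.persistence.Query;

// After (Jakarta EE 9+ namespace, Jetty 11) - only the package prefix changes,
// the JPA API calls themselves stay the same:
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;

public class ExampleDao { // hypothetical DAO, for illustration only
    public long countActive(EntityManager em) {
        // Identical JPA usage as before the migration; only the imports differ.
        Query q = em.createQuery("select count(e) from ExampleEntity e where e.deleteDate is null");
        return (Long) q.getSingleResult();
    }
}
```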

@@ -12,7 +12,7 @@ import com.sismics.docs.core.util.jpa.QueryParam;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 import java.sql.Timestamp;
 import java.util.*;

@@ -4,8 +4,8 @@ import com.sismics.docs.core.model.jpa.AuthenticationToken;
 import com.sismics.util.context.ThreadLocalContext;
 import org.joda.time.DateTime;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.Date;
 import java.util.List;
 import java.util.UUID;

@@ -6,9 +6,9 @@ import com.sismics.docs.core.model.jpa.Comment;
 import com.sismics.docs.core.util.AuditLogUtil;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Date;

@@ -4,8 +4,8 @@ import com.sismics.docs.core.constant.ConfigType;
 import com.sismics.docs.core.model.jpa.Config;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
 
 /**
  * Configuration parameter DAO.

@@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.ContributorDto;
 import com.sismics.docs.core.model.jpa.Contributor;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;

DocumentDao.java

@@ -7,9 +7,10 @@ import com.sismics.docs.core.model.jpa.Document;
 import com.sismics.docs.core.util.AuditLogUtil;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.List;
@@ -50,10 +51,9 @@ public class DocumentDao {
      * @param limit Limit
      * @return List of documents
      */
-    @SuppressWarnings("unchecked")
     public List<Document> findAll(int offset, int limit) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
         q.setFirstResult(offset);
         q.setMaxResults(limit);
         return q.getResultList();
@@ -65,10 +65,9 @@
      * @param userId User ID
      * @return List of documents
      */
-    @SuppressWarnings("unchecked")
     public List<Document> findByUserId(String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
         q.setParameter("userId", userId);
         return q.getResultList();
     }
@@ -88,7 +87,7 @@
         }
 
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, ");
+        StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, d.DOC_IDFILE_C,");
         sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) shareCount, ");
         sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) fileCount, ");
         sb.append(" u.USE_USERNAME_C ");
@@ -122,6 +121,7 @@
             documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
             documentDto.setUpdateTimestamp(((Timestamp) o[i++]).getTime());
             documentDto.setLanguage((String) o[i++]);
+            documentDto.setFileId((String) o[i++]);
             documentDto.setShared(((Number) o[i++]).intValue() > 0);
             documentDto.setFileCount(((Number) o[i++]).intValue());
             documentDto.setCreator((String) o[i]);
@@ -138,16 +138,16 @@
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the document
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
-        q.setParameter("id", id);
-        Document documentDb = (Document) q.getSingleResult();
+        TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
+        dq.setParameter("id", id);
+        Document documentDb = dq.getSingleResult();
 
         // Delete the document
         Date dateNow = new Date();
         documentDb.setDeleteDate(dateNow);
 
         // Delete linked data
-        q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
+        Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
         q.setParameter("documentId", id);
         q.setParameter("dateNow", dateNow);
         q.executeUpdate();
@@ -179,10 +179,10 @@
      */
     public Document getById(String id) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
         q.setParameter("id", id);
         try {
-            return (Document) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
@@ -199,9 +199,9 @@
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the document
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
         q.setParameter("id", document.getId());
-        Document documentDb = (Document) q.getSingleResult();
+        Document documentDb = q.getSingleResult();
 
         // Update the document
         documentDb.setTitle(document.getTitle());
@@ -237,7 +237,6 @@
         query.setParameter("fileId", document.getFileId());
         query.setParameter("id", document.getId());
         query.executeUpdate();
     }
 
     /**
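
The DocumentDao changes above replace untyped `Query` plus casts with `TypedQuery`, which is why the `@SuppressWarnings("unchecked")` annotations disappear. A hedged sketch of the same pattern in isolation (the wrapper class is illustrative, only `Document` comes from the repository):

```java
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.TypedQuery;

class TypedQueryExample { // illustration of the refactoring pattern, not repository code
    Document findActive(EntityManager em, String id) {
        // TypedQuery carries the result type, so no cast and no
        // unchecked-conversion warning is needed.
        TypedQuery<Document> q = em.createQuery(
                "select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
        q.setParameter("id", id);
        try {
            return q.getSingleResult();
        } catch (NoResultException e) {
            return null; // same null-on-missing behavior as the DAO methods above
        }
    }
}
```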

@@ -5,8 +5,8 @@ import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
 import com.sismics.docs.core.model.jpa.DocumentMetadata;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;

FileDao.java

@@ -4,12 +4,16 @@ import com.sismics.docs.core.constant.AuditLogType;
 import com.sismics.docs.core.model.jpa.File;
 import com.sismics.docs.core.util.AuditLogUtil;
 import com.sismics.util.context.ThreadLocalContext;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import java.util.Collections;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.UUID;
 
 /**
@@ -47,10 +51,9 @@ public class FileDao {
      * @param limit Limit
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> findAll(int offset, int limit) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
         q.setFirstResult(offset);
         q.setMaxResults(limit);
         return q.getResultList();
@@ -62,28 +65,38 @@
      * @param userId User ID
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> findByUserId(String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
         q.setParameter("userId", userId);
         return q.getResultList();
     }
 
+    /**
+     * Returns a list of active files.
+     *
+     * @param ids Files IDs
+     * @return List of files
+     */
+    public List<File> getFiles(List<String> ids) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
+        q.setParameter("ids", ids);
+        return q.getResultList();
+    }
+
     /**
-     * Returns an active file.
+     * Returns an active file or null.
      *
      * @param id File ID
-     * @return Document
+     * @return File
      */
     public File getFile(String id) {
-        EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
-        q.setParameter("id", id);
-        try {
-            return (File) q.getSingleResult();
-        } catch (NoResultException e) {
+        List<File> files = getFiles(List.of(id));
+        if (files.isEmpty()) {
             return null;
+        } else {
+            return files.get(0);
         }
     }
@@ -92,15 +105,15 @@
      *
      * @param id File ID
      * @param userId User ID
-     * @return Document
+     * @return File
      */
     public File getFile(String id, String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
         q.setParameter("id", id);
         q.setParameter("userId", userId);
         try {
-            return (File) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
@@ -116,9 +129,9 @@
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the file
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", id);
-        File fileDb = (File) q.getSingleResult();
+        File fileDb = q.getSingleResult();
 
         // Delete the file
         Date dateNow = new Date();
@@ -138,9 +151,9 @@
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the file
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", file.getId());
-        File fileDb = (File) q.getSingleResult();
+        File fileDb = q.getSingleResult();
 
         // Update the file
         fileDb.setDocumentId(file.getDocumentId());
@@ -150,6 +163,7 @@
         fileDb.setMimeType(file.getMimeType());
         fileDb.setVersionId(file.getVersionId());
         fileDb.setLatestVersion(file.isLatestVersion());
+        fileDb.setSize(file.getSize());
 
         return file;
     }
@@ -162,46 +176,82 @@
      */
     public File getActiveById(String id) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", id);
         try {
-            return (File) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
     }
 
     /**
-     * Get files by document ID or all orphan files of an user.
+     * Get files by document ID or all orphan files of a user.
      *
      * @param userId User ID
      * @param documentId Document ID
     * @return List of files
     */
-    @SuppressWarnings("unchecked")
     public List<File> getByDocumentId(String userId, String documentId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
         if (documentId == null) {
-            Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
+            TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
             q.setParameter("userId", userId);
             return q.getResultList();
+        } else {
+            return getByDocumentsIds(Collections.singleton(documentId));
         }
-        Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
-        q.setParameter("documentId", documentId);
-        return q.getResultList();
     }
 
+    /**
+     * Get files by documents IDs.
+     *
+     * @param documentIds Documents IDs
+     * @return List of files
+     */
+    public List<File> getByDocumentsIds(Iterable<String> documentIds) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
+        q.setParameter("documentIds", documentIds);
+        return q.getResultList();
+    }
+
+    /**
+     * Get files count by documents IDs.
+     *
+     * @param documentIds Documents IDs
+     * @return the number of files per document id
+     */
+    public Map<String, Long> countByDocumentsIds(Iterable<String> documentIds) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        Query q = em.createQuery("select f.documentId, count(*) from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null group by (f.documentId)");
+        q.setParameter("documentIds", documentIds);
+        Map<String, Long> result = new HashMap<>();
+        q.getResultList().forEach(o -> {
+            Object[] resultLine = (Object[]) o;
+            result.put((String) resultLine[0], (Long) resultLine[1]);
+        });
+        return result;
+    }
+
     /**
      * Get all files from a version.
      *
      * @param versionId Version ID
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> getByVersionId(String versionId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
         q.setParameter("versionId", versionId);
         return q.getResultList();
     }
+
+    public List<File> getFilesWithUnknownSize(int limit) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        TypedQuery<File> q = em.createQuery("select f from File f where f.size = :size and f.deleteDate is null order by f.order asc", File.class);
+        q.setParameter("size", File.UNKNOWN_SIZE);
+        q.setMaxResults(limit);
+        return q.getResultList();
+    }
 }
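
The new `getByDocumentsIds` and `countByDocumentsIds` methods are what allow search code to fetch file data for a whole page of documents in a single grouped query rather than one query per document (the "faster search for large dataset" change, #698). A rough usage sketch, assuming a caller that already holds a page of document DTOs (the caller class and the `getId()` accessor are assumptions for illustration; `setFileCount(int)` appears in the DocumentDao diff above):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class FileCountExample { // hypothetical caller, for illustration only
    void decorate(List<DocumentDto> page, FileDao fileDao) {
        // Collect the IDs of the documents being rendered.
        List<String> ids = new ArrayList<>();
        for (DocumentDto dto : page) {
            ids.add(dto.getId()); // getId() assumed to exist on the DTO
        }
        // One grouped query for the whole page instead of one query per document.
        Map<String, Long> countByDoc = fileDao.countByDocumentsIds(ids);
        for (DocumentDto dto : page) {
            dto.setFileCount(countByDoc.getOrDefault(dto.getId(), 0L).intValue());
        }
    }
}
```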

@@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.util.*;
 
 /**
@@ -183,12 +183,10 @@
         }
         criteriaList.add("g.GRP_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
         @SuppressWarnings("unchecked")
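
The same simplification recurs in the metadata, route, route-model and route-step DAOs below: because a delete-date criterion is always appended, the emptiness guard around the `where` clause was dead code. A minimal sketch of that invariant (table and column names in the query string are hypothetical placeholders; `Joiner` is the Guava helper already used in these DAOs):

```java
import com.google.common.base.Joiner;
import java.util.ArrayList;
import java.util.List;

class WhereClauseExample { // illustration of the simplified pattern, not repository code
    String buildQuery(String userId) {
        StringBuilder sb = new StringBuilder("select g.GRP_ID_C from T_GROUP g");
        List<String> criteriaList = new ArrayList<>();
        if (userId != null) {
            criteriaList.add("g.GRP_IDUSER_C = :userId"); // optional filter (column name hypothetical)
        }
        criteriaList.add("g.GRP_DELETEDATE_D is null"); // always added, so the list is never empty
        // The "if (!criteriaList.isEmpty())" guard is therefore unnecessary.
        sb.append(" where ").append(Joiner.on(" and ").join(criteriaList));
        return sb.toString();
    }
}
```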

@@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.util.*;
 
 /**
@@ -123,10 +123,8 @@
         criteriaList.add("m.MET_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

@@ -6,9 +6,9 @@ import com.sismics.util.context.ThreadLocalContext;
 import org.joda.time.DateTime;
 import org.joda.time.DurationFieldType;
 
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.util.Date;
 import java.util.UUID;

@@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.RelationDto;
 import com.sismics.docs.core.model.jpa.Relation;
 import com.sismics.util.context.ThreadLocalContext;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.*;
 
 /**

View File

@@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
 import com.google.common.collect.Sets;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.Set;
 /**

View File

@@ -11,7 +11,7 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 import java.sql.Timestamp;
 import java.util.*;
@@ -64,10 +64,8 @@ public class RouteDao {
         }
         criteriaList.add("r.RTE_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.sql.Timestamp;
 import java.util.*;
@@ -145,10 +145,8 @@ public class RouteModelDao {
         criteriaList.add("rm.RTM_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -12,8 +12,8 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.sql.Timestamp;
 import java.util.*;
@@ -90,10 +90,8 @@ public class RouteStepDao {
         }
         criteriaList.add("rs.RTP_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
 import com.sismics.docs.core.model.jpa.Share;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import java.util.Date;
 import java.util.UUID;
@@ -19,7 +19,6 @@ public class ShareDao {
      *
      * @param share Share
      * @return New ID
-     * @throws Exception
      */
     public String create(Share share) {
         // Create the UUID

View File

@@ -13,9 +13,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.util.*;
 /**
@@ -199,10 +199,8 @@ public class TagDao {
         criteriaList.add("t.TAG_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -1,6 +1,7 @@
 package com.sismics.docs.core.dao;
 import com.google.common.base.Joiner;
+import com.google.common.base.Strings;
 import at.favre.lib.crypto.bcrypt.BCrypt;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -18,9 +19,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.sql.Timestamp;
 import java.util.*;
@@ -289,7 +290,7 @@ public class UserDao {
     private String hashPassword(String password) {
         int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
         String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
-        if (envBcryptWork != null) {
+        if (!Strings.isNullOrEmpty(envBcryptWork)) {
            try {
                int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
                if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {
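For reference, a minimal sketch of the work-factor selection this hunk is part of: the environment variable named by Constants.BCRYPT_WORK_ENV is only honored when it is non-empty and parses to a cost in bcrypt's accepted 4..31 range. The standalone helper below is illustrative only and assumes the surrounding method (not fully shown here) falls back to Constants.DEFAULT_BCRYPT_WORK on invalid values.

    // Illustrative only: mirrors the selection logic above with plain JDK types.
    static int resolveBcryptWork(String envValue, int defaultWork) {
        if (envValue == null || envValue.isEmpty()) {
            return defaultWork;
        }
        try {
            int parsed = Integer.parseInt(envValue);
            // bcrypt rejects cost factors outside 4..31, so keep the default instead
            return (parsed >= 4 && parsed <= 31) ? parsed : defaultWork;
        } catch (NumberFormatException e) {
            return defaultWork;
        }
    }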

View File

@@ -3,9 +3,9 @@ package com.sismics.docs.core.dao;
 import com.sismics.docs.core.model.jpa.Vocabulary;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.util.List;
 import java.util.UUID;
@@ -20,7 +20,6 @@ public class VocabularyDao {
      *
      * @param vocabulary Vocabulary
      * @return New ID
-     * @throws Exception
      */
     public String create(Vocabulary vocabulary) {
         // Create the UUID

View File

@@ -9,9 +9,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.Query;
 import java.sql.Timestamp;
 import java.util.*;
@@ -42,11 +42,9 @@ public class WebhookDao {
         }
         criteriaList.add("w.WHK_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
         @SuppressWarnings("unchecked")

View File

@@ -1,5 +1,6 @@
 package com.sismics.docs.core.dao.criteria;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
@@ -18,7 +19,7 @@ public class DocumentCriteria {
     /**
      * Search query.
      */
-    private String search;
+    private String simpleSearch;
     /**
      * Full content search query.
@@ -49,13 +50,13 @@ public class DocumentCriteria {
      * Tag IDs.
      * The first level list will be AND'ed and the second level list will be OR'ed.
      */
-    private List<List<String>> tagIdList;
+    private List<List<String>> tagIdList = new ArrayList<>();
     /**
      * Tag IDs to exclude.
      * The first and second level list will be excluded.
      */
-    private List<List<String>> excludedTagIdList;
+    private List<List<String>> excludedTagIdList = new ArrayList<>();
     /**
      * Shared status.
@@ -83,9 +84,9 @@ public class DocumentCriteria {
     private String mimeType;
     /**
-     * The title.
+     * Titles to include.
      */
-    private String title;
+    private List<String> titleList = new ArrayList<>();
     public List<String> getTargetIdList() {
         return targetIdList;
@@ -95,12 +96,12 @@ public class DocumentCriteria {
         this.targetIdList = targetIdList;
     }
-    public String getSearch() {
-        return search;
+    public String getSimpleSearch() {
+        return simpleSearch;
     }
-    public void setSearch(String search) {
-        this.search = search;
+    public void setSimpleSearch(String search) {
+        this.simpleSearch = search;
     }
     public String getFullSearch() {
@@ -131,19 +132,10 @@ public class DocumentCriteria {
         return tagIdList;
     }
-    public void setTagIdList(List<List<String>> tagIdList) {
-        this.tagIdList = tagIdList;
-    }
     public List<List<String>> getExcludedTagIdList() {
         return excludedTagIdList;
     }
-    public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
-        this.excludedTagIdList = excludedTagIdList;
-        return this;
-    }
     public Boolean getShared() {
         return shared;
     }
@@ -167,11 +159,7 @@ public class DocumentCriteria {
     public void setCreatorId(String creatorId) {
         this.creatorId = creatorId;
     }
-    public Boolean getActiveRoute() {
-        return activeRoute;
-    }
     public Date getUpdateDateMin() {
         return updateDateMin;
     }
@@ -188,6 +176,10 @@ public class DocumentCriteria {
         this.updateDateMax = updateDateMax;
     }
+    public Boolean getActiveRoute() {
+        return activeRoute;
+    }
     public void setActiveRoute(Boolean activeRoute) {
         this.activeRoute = activeRoute;
     }
@@ -200,11 +192,7 @@ public class DocumentCriteria {
         this.mimeType = mimeType;
     }
-    public String getTitle() {
-        return title;
-    }
-    public void setTitle(String title) {
-        this.title = title;
-    }
+    public List<String> getTitleList() {
+        return titleList;
+    }
 }
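A hedged sketch of how a caller might fill in the reworked criteria: simpleSearch replaces the old search field, and the title filter is now a list so several titles can be matched in one query. The setters for the tag lists were removed and the fields are initialized to empty ArrayLists, so the sketch mutates the lists returned by the getters, which is an assumption about the intended calling convention rather than something shown in this diff.

    DocumentCriteria criteria = new DocumentCriteria();
    criteria.setSimpleSearch("invoice");                  // was setSearch(...)
    criteria.getTitleList().add("Invoice 2023-04");       // was setTitle(...), now a list of titles
    criteria.getTitleList().add("Invoice 2023-05");
    criteria.getTagIdList().add(List.of("tag-id-1"));     // AND'ed groups of OR'ed tag IDs, mutated in place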

View File

@@ -3,8 +3,8 @@ package com.sismics.docs.core.dao.dto;
 import com.sismics.docs.core.constant.RouteStepType;
 import com.sismics.util.JsonUtil;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 /**
  * Route step DTO.

View File

@@ -13,6 +13,8 @@ public class FileDeletedAsyncEvent extends UserEvent {
      */
     private String fileId;
+    private Long fileSize;
     public String getFileId() {
         return fileId;
     }
@@ -21,10 +23,19 @@ public class FileDeletedAsyncEvent extends UserEvent {
         this.fileId = fileId;
     }
+    public Long getFileSize() {
+        return fileSize;
+    }
+    public void setFileSize(Long fileSize) {
+        this.fileSize = fileSize;
+    }
     @Override
     public String toString() {
         return MoreObjects.toStringHelper(this)
             .add("fileId", fileId)
+            .add("fileSize", fileSize)
             .toString();
     }
 }
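For context, a sketch of how the producer side is expected to populate the new field before the event reaches the listener below. The exact call site is not part of this diff, so the setUserId call (inherited from UserEvent) and the AppContext.getInstance().getAsyncEventBus() accessor are assumptions.

    FileDeletedAsyncEvent event = new FileDeletedAsyncEvent();
    event.setUserId(file.getUserId());   // assumed setter on UserEvent; the listener reads getUserId()
    event.setFileId(file.getId());
    event.setFileSize(file.getSize());   // may be File.UNKNOWN_SIZE for files stored before the migration
    // Assumed event-bus accessor; the listener below is subscribed to this bus.
    AppContext.getInstance().getAsyncEventBus().post(event);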

View File

@@ -2,6 +2,8 @@ package com.sismics.docs.core.listener.async;
 import com.google.common.eventbus.AllowConcurrentEvents;
 import com.google.common.eventbus.Subscribe;
+import com.sismics.docs.core.dao.UserDao;
 import com.sismics.docs.core.event.FileDeletedAsyncEvent;
 import com.sismics.docs.core.model.context.AppContext;
+import com.sismics.docs.core.model.jpa.File;
+import com.sismics.docs.core.model.jpa.User;
 import com.sismics.docs.core.util.FileUtil;
 import com.sismics.docs.core.util.TransactionUtil;
 import org.slf4j.Logger;
@@ -11,7 +14,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Listener on file deleted.
  *
  * @author bgamard
  */
 public class FileDeletedAsyncListener {
@@ -22,7 +25,7 @@ public class FileDeletedAsyncListener {
     /**
      * File deleted.
      *
      * @param event File deleted event
      * @throws Exception e
      */
@@ -32,6 +35,24 @@ public class FileDeletedAsyncListener {
         if (log.isInfoEnabled()) {
             log.info("File deleted event: " + event.toString());
         }
+        TransactionUtil.handle(() -> {
+            // Update the user quota
+            UserDao userDao = new UserDao();
+            User user = userDao.getById(event.getUserId());
+            if (user != null) {
+                Long fileSize = event.getFileSize();
+                if (fileSize.equals(File.UNKNOWN_SIZE)) {
+                    // The file size was not in the database, in this case we need to get from the unencrypted size.
+                    fileSize = FileUtil.getFileSize(event.getFileId(), user);
+                }
+                if (! fileSize.equals(File.UNKNOWN_SIZE)) {
+                    user.setStorageCurrent(user.getStorageCurrent() - fileSize);
+                    userDao.updateQuota(user);
+                }
+            }
+        });
         // Delete the file from storage
         FileUtil.delete(event.getFileId());

View File

@@ -1,5 +1,6 @@
 package com.sismics.docs.core.model.context;
+import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.eventbus.AsyncEventBus;
 import com.google.common.eventbus.EventBus;
@@ -8,6 +9,7 @@ import com.sismics.docs.core.dao.UserDao;
 import com.sismics.docs.core.listener.async.*;
 import com.sismics.docs.core.model.jpa.User;
 import com.sismics.docs.core.service.FileService;
+import com.sismics.docs.core.service.FileSizeService;
 import com.sismics.docs.core.service.InboxService;
 import com.sismics.docs.core.util.PdfUtil;
 import com.sismics.docs.core.util.indexing.IndexingHandler;
@@ -64,6 +66,11 @@ public class AppContext {
      */
     private FileService fileService;
+    /**
+     * File size service.
+     */
+    private FileSizeService fileSizeService;
     /**
      * Asynchronous executors.
      */
@@ -101,12 +108,17 @@ public class AppContext {
         inboxService.startAsync();
         inboxService.awaitRunning();
+        // Start file size service
+        fileSizeService = new FileSizeService();
+        fileSizeService.startAsync();
+        fileSizeService.awaitRunning();
         // Register fonts
         PdfUtil.registerFonts();
         // Change the admin password if needed
         String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
-        if (envAdminPassword != null) {
+        if (!Strings.isNullOrEmpty(envAdminPassword)) {
             UserDao userDao = new UserDao();
             User adminUser = userDao.getById("admin");
             if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
@@ -117,7 +129,7 @@ public class AppContext {
         // Change the admin email if needed
         String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
-        if (envAdminEmail != null) {
+        if (!Strings.isNullOrEmpty(envAdminEmail)) {
             UserDao userDao = new UserDao();
             User adminUser = userDao.getById("admin");
             if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {
@@ -237,6 +249,10 @@ public class AppContext {
             fileService.stopAsync();
         }
+        if (fileSizeService != null) {
+            fileSizeService.stopAsync();
+        }
         instance = null;
     }
 }

View File

@@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.AclType;
 import com.sismics.docs.core.constant.PermType;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Date;
 /**

View File

@@ -2,12 +2,12 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.AuditLogType;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -1,9 +1,9 @@
 package com.sismics.docs.core.model.jpa;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -1,11 +1,11 @@
 package com.sismics.docs.core.model.jpa;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.ConfigType;

View File

@@ -1,9 +1,9 @@
 package com.sismics.docs.core.model.jpa;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.util.Date;
 /**

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.io.Serializable;
 /**

View File

@@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
 import com.google.common.base.Strings;
 import com.sismics.util.mime.MimeTypeUtil;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Date;
 /**
@@ -88,6 +88,14 @@ public class File implements Loggable {
     @Column(name = "FIL_LATESTVERSION_B", nullable = false)
     private boolean latestVersion;
+    public static final Long UNKNOWN_SIZE = -1L;
+    /**
+     * Can be {@link File#UNKNOWN_SIZE} if the size has not been stored in the database when the file has been uploaded
+     */
+    @Column(name = "FIL_SIZE_N", nullable = false)
+    private Long size;
     /**
      * Private key to decrypt the file.
      * Not saved to database, of course.
@@ -204,6 +212,18 @@ public class File implements Loggable {
         return this;
     }
+    /**
+     * Can return {@link File#UNKNOWN_SIZE} if the file size is not stored in the database.
+     */
+    public Long getSize() {
+        return size;
+    }
+    public File setSize(Long size) {
+        this.size = size;
+        return this;
+    }
     @Override
     public String toString() {
         return MoreObjects.toStringHelper(this)
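Since FIL_SIZE_N is non-nullable but may hold the -1 sentinel, code reading the size should treat UNKNOWN_SIZE explicitly. A minimal illustrative sketch, not part of the patch:

    Long size = file.getSize();
    boolean known = !File.UNKNOWN_SIZE.equals(size);
    // When unknown, the value is backfilled later by FileSizeService, or can be
    // computed on demand with FileUtil.getFileSize(file.getId(), owner).
    long displayedSize = known ? size : 0L;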

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.MetadataType;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Date;
 /**

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.util.Date;
 /**

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.util.Date;
 /**

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.util.Date;
 /**

View File

@@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.RouteStepTransition;
 import com.sismics.docs.core.constant.RouteStepType;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Date;
 /**

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import java.util.Date;
 /**

View File

@@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -1,9 +1,9 @@
 package com.sismics.docs.core.model.jpa;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
 import com.google.common.base.MoreObjects;

View File

@@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
 import com.google.common.base.MoreObjects;
 import com.sismics.docs.core.constant.WebhookEvent;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Date;
 /**

View File

@@ -69,13 +69,18 @@ public class FileService extends AbstractScheduledService {
         return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
     }
+    public Path createTemporaryFile() throws IOException {
+        return createTemporaryFile(null);
+    }
     /**
      * Create a temporary file.
      *
+     * @param name Wanted file name
      * @return New temporary file
      */
-    public Path createTemporaryFile() throws IOException {
-        Path path = Files.createTempFile("sismics_docs", null);
+    public Path createTemporaryFile(String name) throws IOException {
+        Path path = Files.createTempFile("sismics_docs", name);
         referenceSet.add(new TemporaryPathReference(path, referenceQueue));
         return path;
     }
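The new overload threads an optional name through to Files.createTempFile, where it ends up as the suffix of the generated file; the zero-argument variant keeps the old behaviour by passing null. A possible call, assuming a FileService instance is at hand:

    // Produces something like "sismics_docs<random>.pdf"; a null name falls back to the JDK default suffix.
    Path temp = fileService.createTemporaryFile(".pdf");
    // Old behaviour, no explicit suffix:
    Path anonymous = fileService.createTemporaryFile();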

View File

@@ -0,0 +1,78 @@
package com.sismics.docs.core.service;

import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Service that retrieves file sizes when they are not in the database.
 */
public class FileSizeService extends AbstractScheduledService {
    /**
     * Logger.
     */
    private static final Logger log = LoggerFactory.getLogger(FileSizeService.class);

    public FileSizeService() {
    }

    @Override
    protected void startUp() {
        log.info("File size service starting up");
    }

    @Override
    protected void shutDown() {
        log.info("File size service shutting down");
    }

    private static final int BATCH_SIZE = 30;

    @Override
    protected void runOneIteration() {
        try {
            TransactionUtil.handle(() -> {
                FileDao fileDao = new FileDao();
                List<File> files = fileDao.getFilesWithUnknownSize(BATCH_SIZE);
                for (File file : files) {
                    processFile(file);
                }
                if (files.size() < BATCH_SIZE) {
                    log.info("No more file to process, stopping the service");
                    stopAsync();
                }
            });
        } catch (Throwable e) {
            log.error("Exception during file service iteration", e);
        }
    }

    void processFile(File file) {
        UserDao userDao = new UserDao();
        User user = userDao.getById(file.getUserId());
        if (user == null) {
            return;
        }
        long fileSize = FileUtil.getFileSize(file.getId(), user);
        if (fileSize != File.UNKNOWN_SIZE) {
            FileDao fileDao = new FileDao();
            file.setSize(fileSize);
            fileDao.update(file);
        }
    }

    @Override
    protected Scheduler scheduler() {
        return Scheduler.newFixedDelaySchedule(0, 1, TimeUnit.MINUTES);
    }
}
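The service runs once a minute, fixes at most 30 files per pass, and stops itself once a pass returns less than a full batch. A quick way to check whether the backfill is finished, assuming an slf4j logger named log is in scope and an entity-manager context is already bound to the thread:

    TransactionUtil.handle(() -> {
        // getFilesWithUnknownSize(1) returns an empty list once every FIL_SIZE_N is populated.
        boolean done = new FileDao().getFilesWithUnknownSize(1).isEmpty();
        log.info("File size backfill finished: " + done);
    });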

View File

@@ -1,14 +1,11 @@
 package com.sismics.docs.core.service;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.AbstractScheduledService;
 import com.sismics.docs.core.constant.ConfigType;
 import com.sismics.docs.core.dao.TagDao;
 import com.sismics.docs.core.dao.criteria.TagCriteria;
 import com.sismics.docs.core.dao.dto.TagDto;
 import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
-import com.sismics.docs.core.model.jpa.Config;
 import com.sismics.docs.core.model.jpa.Document;
 import com.sismics.docs.core.model.jpa.Tag;
 import com.sismics.docs.core.util.ConfigUtil;
@@ -18,7 +15,7 @@ import com.sismics.docs.core.util.TransactionUtil;
 import com.sismics.docs.core.util.jpa.SortCriteria;
 import com.sismics.util.EmailUtil;
 import com.sismics.util.context.ThreadLocalContext;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -85,7 +82,7 @@ public class InboxService extends AbstractScheduledService {
         lastSyncDate = new Date();
         lastSyncMessageCount = 0;
         try {
-            HashMap<String, String> tagsNameToId = getAllTags();
+            Map<String, String> tagsNameToId = getAllTags();
             inbox = openInbox();
             Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
@@ -159,6 +156,7 @@ public class InboxService extends AbstractScheduledService {
         String port = ConfigUtil.getConfigStringValue(ConfigType.INBOX_PORT);
         properties.put("mail.imap.host", ConfigUtil.getConfigStringValue(ConfigType.INBOX_HOSTNAME));
         properties.put("mail.imap.port", port);
+        properties.setProperty("mail.imap.starttls.enable", ConfigUtil.getConfigStringValue(ConfigType.INBOX_STARTTLS).toString());
         boolean isSsl = "993".equals(port);
         properties.put("mail.imap.ssl.enable", String.valueOf(isSsl));
         properties.setProperty("mail.imap.socketFactory.class",
@@ -192,7 +190,7 @@ public class InboxService extends AbstractScheduledService {
      * @param message Message
      * @throws Exception e
      */
-    private void importMessage(Message message, HashMap<String, String> tags) throws Exception {
+    private void importMessage(Message message, Map<String, String> tags) throws Exception {
         log.info("Importing message: " + message.getSubject());
         // Parse the mail
@@ -273,16 +271,16 @@ public class InboxService extends AbstractScheduledService {
     /**
      * Fetches a HashMap with all tag names as keys and their respective ids as values.
      *
-     * @return HashMap with all tags or null if not enabled
+     * @return Map with all tags or null if not enabled
      */
-    private HashMap<String, String> getAllTags() {
+    private Map<String, String> getAllTags() {
         if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
             return null;
         }
         TagDao tagDao = new TagDao();
         List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
-        HashMap<String, String> tagsNameToId = new HashMap<>();
+        Map<String, String> tagsNameToId = new HashMap<>();
         for (TagDto tagDto : tags) {
             tagsNameToId.put(tagDto.getName(), tagDto.getId());
         }
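The added property simply forwards the INBOX_STARTTLS configuration value to jakarta.mail. For a scanner pointed at a server that requires STARTTLS on the standard IMAP port, the resulting session properties look roughly like this; the literal values are shown only for illustration, the real ones come from ConfigUtil:

    Properties properties = new Properties();
    properties.put("mail.imap.host", "imap.example.com");        // ConfigType.INBOX_HOSTNAME (example value)
    properties.put("mail.imap.port", "143");                     // ConfigType.INBOX_PORT
    properties.setProperty("mail.imap.starttls.enable", "true"); // ConfigType.INBOX_STARTTLS
    properties.put("mail.imap.ssl.enable", "false");             // implicit TLS is only enabled when the port is 993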

View File

@@ -9,7 +9,7 @@ import com.sismics.docs.core.util.action.RemoveTagAction;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 /**
  * Action utilities.

View File

@@ -6,7 +6,7 @@ import com.sismics.docs.core.model.jpa.AuditLog;
 import com.sismics.docs.core.model.jpa.Loggable;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 /**
  * Audit log utilities.

View File

@@ -8,13 +8,12 @@ import java.util.ResourceBundle;
 /**
  * Configuration parameter utilities.
  *
- * @author jtremeaux
 */
 public class ConfigUtil {
     /**
      * Returns the textual value of a configuration parameter.
      *
      * @param configType Type of the configuration parameter
      * @return Textual value of the configuration parameter
      * @throws IllegalStateException Configuration parameter undefined
@@ -30,7 +29,7 @@ public class ConfigUtil {
     /**
      * Returns the configuration resource bundle.
      *
      * @return Resource bundle
      */
     public static ResourceBundle getConfigBundle() {
@@ -39,14 +38,14 @@ public class ConfigUtil {
     /**
      * Returns the integer value of a configuration parameter.
      *
      * @param configType Type of the configuration parameter
      * @return Integer value of the configuration parameter
      * @throws IllegalStateException Configuration parameter undefined
      */
     public static int getConfigIntegerValue(ConfigType configType) {
         String value = getConfigStringValue(configType);
         return Integer.parseInt(value);
     }
@@ -65,14 +64,28 @@ public class ConfigUtil {
     /**
      * Returns the boolean value of a configuration parameter.
      *
      * @param configType Type of the configuration parameter
      * @return Boolean value of the configuration parameter
      * @throws IllegalStateException Configuration parameter undefined
      */
     public static boolean getConfigBooleanValue(ConfigType configType) {
         String value = getConfigStringValue(configType);
         return Boolean.parseBoolean(value);
     }
+    /**
+     * Returns the boolean value of a configuration parameter with a default value.
+     *
+     * @param configType Type of the configuration parameter
+     * @param defaultValue Default value to return if the configuration parameter is undefined
+     * @return Boolean value of the configuration parameter
+     */
+    public static boolean getConfigBooleanValue(ConfigType configType, boolean defaultValue) {
+        try {
+            return getConfigBooleanValue(configType);
+        } catch (IllegalStateException e) {
+            return defaultValue;
+        }
+    }
 }
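The new overload is what lets callers treat an absent configuration row as a default rather than an error. The OCR toggle added later in this changeset uses it exactly this way:

    // OCR stays enabled unless the OCR_ENABLED parameter exists and is set to false.
    boolean ocrEnabled = ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true);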

View File

@@ -5,7 +5,7 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import com.sismics.util.EnvironmentUtil;

View File

@@ -1,6 +1,5 @@
 package com.sismics.docs.core.util;
-import com.google.common.base.Charsets;
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.io.CharStreams;
@@ -17,7 +16,10 @@ import com.sismics.util.Scalr;
 import com.sismics.util.context.ThreadLocalContext;
 import com.sismics.util.io.InputStreamReaderThread;
 import com.sismics.util.mime.MimeTypeUtil;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.CountingInputStream;
+import org.apache.commons.io.output.NullOutputStream;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -28,6 +30,7 @@ import java.awt.image.BufferedImage;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.*;
@@ -46,7 +49,7 @@ public class FileUtil {
     /**
      * File ID of files currently being processed.
      */
-    private static Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
+    private static final Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
     /**
      * Optical character recognition on an image.
@@ -76,7 +79,7 @@ public class FileUtil {
         // Consume the data as text
         try (InputStream is = process.getInputStream()) {
-            return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
+            return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
         }
     }
@@ -149,6 +152,7 @@ public class FileUtil {
         file.setName(StringUtils.abbreviate(name, 200));
         file.setMimeType(mimeType);
         file.setUserId(userId);
+        file.setSize(fileSize);
         // Get files of this document
         FileDao fileDao = new FileDao();
@@ -240,4 +244,31 @@ public class FileUtil {
     public static boolean isProcessingFile(String fileId) {
         return processingFileSet.contains(fileId);
     }
+    /**
+     * Get the size of a file on disk.
+     *
+     * @param fileId the file id
+     * @param user the file owner
+     * @return the size or -1 if something went wrong
+     */
+    public static long getFileSize(String fileId, User user) {
+        // To get the size we copy the decrypted content into a null output stream
+        // and count the copied byte size.
+        Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
+        if (! Files.exists(storedFile)) {
+            log.debug("File does not exist " + fileId);
+            return File.UNKNOWN_SIZE;
+        }
+        try (InputStream fileInputStream = Files.newInputStream(storedFile);
+             InputStream inputStream = EncryptionUtil.decryptInputStream(fileInputStream, user.getPrivateKey());
+             CountingInputStream countingInputStream = new CountingInputStream(inputStream);
+        ) {
+            IOUtils.copy(countingInputStream, NullOutputStream.NULL_OUTPUT_STREAM);
+            return countingInputStream.getByteCount();
+        } catch (Exception e) {
+            log.debug("Can't find size of file " + fileId, e);
+            return File.UNKNOWN_SIZE;
+        }
+    }
 }
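Because stored files are encrypted, the size cannot be read from the file system directly; the helper decrypts into a discarding stream and counts the bytes, returning File.UNKNOWN_SIZE on any failure. A typical caller, mirroring FileSizeService.processFile earlier in this changeset, checks the sentinel before persisting:

    long size = FileUtil.getFileSize(file.getId(), owner);
    if (size != File.UNKNOWN_SIZE) {
        file.setSize(size);
        new FileDao().update(file);   // persists FIL_SIZE_N
    }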

View File

@@ -10,9 +10,9 @@ import com.sismics.docs.core.dao.dto.MetadataDto;
 import com.sismics.docs.core.model.jpa.DocumentMetadata;
 import com.sismics.docs.core.util.jpa.SortCriteria;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import java.text.MessageFormat;
 import java.util.List;
 import java.util.Map;

View File

@@ -19,10 +19,10 @@ import com.sismics.docs.core.model.jpa.Document;
 import com.sismics.docs.core.model.jpa.RouteModel;
 import com.sismics.util.context.ThreadLocalContext;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import java.io.StringReader;
 import java.util.List;

View File

@@ -5,8 +5,8 @@ import com.sismics.util.jpa.EMF;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityTransaction;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.EntityTransaction;
 /**
  * Database transaction utils.

View File

@@ -2,7 +2,7 @@ package com.sismics.docs.core.util.action;
 import com.sismics.docs.core.dao.dto.DocumentDto;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 /**
  * Base action interface.

View File

@@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
 import com.sismics.docs.core.dao.dto.DocumentDto;
 import com.sismics.docs.core.dao.dto.TagDto;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import java.util.List;
 import java.util.Set;

View File

@@ -13,7 +13,7 @@ import com.sismics.util.context.ThreadLocalContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import java.nio.file.Path;
 import java.util.List;

View File

@@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
 import com.sismics.docs.core.dao.dto.DocumentDto;
 import com.sismics.docs.core.dao.dto.TagDto;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import java.util.List;
 import java.util.Set;

View File

@@ -4,7 +4,7 @@ import com.sismics.docs.core.dao.TagDao;
 import com.sismics.docs.core.dao.criteria.TagCriteria;
 import com.sismics.docs.core.dao.dto.TagDto;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import java.util.List;
 /**

View File

@@ -13,10 +13,9 @@ import org.apache.directory.api.ldap.model.entry.Attribute;
 import org.apache.directory.api.ldap.model.entry.Entry;
 import org.apache.directory.api.ldap.model.entry.Value;
 import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.ldap.client.api.DefaultLdapConnectionFactory;
+import org.apache.directory.ldap.client.api.LdapConnection;
 import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapConnectionPool;
-import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,59 +34,41 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
     private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);
     /**
-     * LDAP connection pool.
+     * Get a LDAP connection.
+     * @return LdapConnection
      */
-    private static LdapConnectionPool pool;
-    /**
-     * Reset the LDAP pool.
-     */
-    public static void reset() {
-        if (pool != null) {
-            try {
-                pool.close();
-            } catch (Exception e) {
-                // NOP
-            }
-        }
-        pool = null;
-    }
-    /**
-     * Initialize the LDAP pool.
-     */
-    private static void init() {
+    private LdapConnection getConnection() {
         ConfigDao configDao = new ConfigDao();
         Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
-        if (pool != null || ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
-            return;
+        if (ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
+            return null;
         }
         LdapConnectionConfig config = new LdapConnectionConfig();
         config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
         config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
+        config.setUseSsl(ConfigUtil.getConfigBooleanValue(ConfigType.LDAP_USESSL));
        config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
        config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
-        DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
-        pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), null);
+        return new LdapNetworkConnection(config);
     }
     @Override
     public User authenticate(String username, String password) {
-        init();
-        if (pool == null) {
-            return null;
-        }
         // Fetch and authenticate the user
         Entry userEntry;
-        try {
-            EntryCursor cursor = pool.getConnection().search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
+        try (LdapConnection ldapConnection = getConnection()) {
+            if (ldapConnection == null) {
+                return null;
+            }
+            ldapConnection.bind();
+            EntryCursor cursor = ldapConnection.search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
                 ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
             if (cursor.next()) {
                 userEntry = cursor.get();
-                pool.getConnection().bind(userEntry.getDn(), password);
+                ldapConnection.bind(userEntry.getDn(), password);
             } else {
                 // User not found
                 return null;

View File

@@ -3,7 +3,6 @@ package com.sismics.docs.core.util.format;
 import com.google.common.collect.Lists;
 import com.sismics.util.ClasspathScanner;
-import java.lang.reflect.InvocationTargetException;
 import java.util.List;
 /**

@@ -3,6 +3,8 @@ package com.sismics.docs.core.util.format;
 import com.google.common.io.Closer;
 import com.sismics.docs.core.constant.Constants;
 import com.sismics.docs.core.util.FileUtil;
+import com.sismics.docs.core.util.ConfigUtil;
+import com.sismics.docs.core.constant.ConfigType;
 import com.sismics.util.mime.MimeType;
 import org.apache.pdfbox.io.MemoryUsageSetting;
 import org.apache.pdfbox.pdmodel.PDDocument;
@@ -22,7 +24,6 @@ import java.nio.file.Path;
 /**
  * Image format handler.
  *
- * @author bgamard
  */
 public class ImageFormatHandler implements FormatHandler {
     /**
@@ -45,7 +46,7 @@ public class ImageFormatHandler implements FormatHandler {
     @Override
     public String extractContent(String language, Path file) throws Exception {
-        if (language == null) {
+        if (language == null || !ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
             return null;
         }

@@ -2,6 +2,8 @@ package com.sismics.docs.core.util.format;
 import com.google.common.io.Closer;
 import com.sismics.docs.core.util.FileUtil;
+import com.sismics.docs.core.util.ConfigUtil;
+import com.sismics.docs.core.constant.ConfigType;
 import com.sismics.util.mime.MimeType;
 import org.apache.pdfbox.io.MemoryUsageSetting;
 import org.apache.pdfbox.multipdf.PDFMergerUtility;
@@ -53,7 +55,7 @@ public class PdfFormatHandler implements FormatHandler {
         }
         // No text content, try to OCR it
-        if (language != null && content != null && content.trim().isEmpty()) {
+        if (language != null && content != null && content.trim().isEmpty() && ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
             StringBuilder sb = new StringBuilder();
             try (InputStream inputStream = Files.newInputStream(file);
                  PDDocument pdfDocument = PDDocument.load(inputStream)) {
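Both the image handler above and this PDF handler now consult the same flag before doing any OCR. A minimal sketch of the gate, assuming ConfigUtil.getConfigBooleanValue(type, defaultValue) falls back to the supplied default when no OCR_ENABLED entry is stored, so OCR stays enabled unless it is switched off explicitly:

    // Sketch only: skip OCR when the flag is off.
    if (!ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
        // ImageFormatHandler returns null here; PdfFormatHandler simply skips its OCR fallback.
        return null;
    }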

@@ -1,6 +1,5 @@
 package com.sismics.docs.core.util.format;
-import com.google.common.base.Charsets;
 import com.google.common.io.Closer;
 import com.lowagie.text.*;
 import com.lowagie.text.pdf.PdfWriter;
@@ -11,6 +10,7 @@ import org.apache.pdfbox.pdmodel.PDDocument;
 import java.awt.image.BufferedImage;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -33,7 +33,7 @@ public class TextPlainFormatHandler implements FormatHandler {
         PdfWriter.getInstance(output, pdfOutputStream);
         output.open();
-        String content = new String(Files.readAllBytes(file), Charsets.UTF_8);
+        String content = Files.readString(file, StandardCharsets.UTF_8);
         Font font = FontFactory.getFont("LiberationMono-Regular");
         Paragraph paragraph = new Paragraph(content, font);
         paragraph.setAlignment(Element.ALIGN_LEFT);
@@ -46,7 +46,7 @@ public class TextPlainFormatHandler implements FormatHandler {
     @Override
     public String extractContent(String language, Path file) throws Exception {
-        return new String(Files.readAllBytes(file), "UTF-8");
+        return Files.readString(file, StandardCharsets.UTF_8);
     }
     @Override

@@ -1,6 +1,5 @@
 package com.sismics.docs.core.util.format;
-import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 import com.google.common.io.ByteStreams;
 import com.google.common.io.Closer;
@@ -13,6 +12,7 @@ import javax.imageio.ImageIO;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
@@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {
         // Consume the data as a string
         try (InputStream is = process.getInputStream()) {
-            return new String(ByteStreams.toByteArray(is), Charsets.UTF_8);
+            return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
         } catch (Exception e) {
             return null;
         }

@@ -26,9 +26,18 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.CheckIndex;
+import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.queryparser.simple.SimpleQueryParser;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.highlight.Highlighter;
 import org.apache.lucene.search.highlight.QueryScorer;
 import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
@@ -47,7 +56,12 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.sql.Timestamp;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
 /**
  * Lucene indexing handler.
@@ -242,34 +256,28 @@ public class LuceneIndexingHandler implements IndexingHandler {
         StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, d.DOC_IDFILE_C, ");
         sb.append(" s.count c5, ");
-        sb.append(" f.count c6, ");
         sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
         sb.append(" from T_DOCUMENT d ");
         sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
                 " FROM T_SHARE s, T_ACL ac " +
                 " WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
-                " s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
-                " left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
-                " FROM T_FILE f " +
-                " WHERE f.FIL_DELETEDATE_D is null group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
+                " s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C ");
         sb.append(" left join (select rs.*, rs3.idDocument " +
                 "from T_ROUTE_STEP rs " +
                 "join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
                 "where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
         // Add search criterias
-        if (criteria.getTargetIdList() != null) {
-            if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
-                // Read permission is enough for searching
-                sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
-                sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
-                sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
-                criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
-            }
-            parameterMap.put("targetIdList", criteria.getTargetIdList());
+        if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
+            // Read permission is enough for searching
+            sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
+            sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
+            sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
+            criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
         }
-        if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
-            documentSearchMap = search(criteria.getSearch(), criteria.getFullSearch());
+        parameterMap.put("targetIdList", criteria.getTargetIdList());
+        if (!Strings.isNullOrEmpty(criteria.getSimpleSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
+            documentSearchMap = search(criteria.getSimpleSearch(), criteria.getFullSearch());
             if (documentSearchMap.isEmpty()) {
                 // If the search doesn't find any document, the request should return nothing
                 documentSearchMap.put(UUID.randomUUID().toString(), null);
@@ -295,11 +303,11 @@ public class LuceneIndexingHandler implements IndexingHandler {
             criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
             parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
         }
-        if (criteria.getTitle() != null) {
-            criteriaList.add("d.DOC_TITLE_C = :title");
-            parameterMap.put("title", criteria.getTitle());
+        if (!criteria.getTitleList().isEmpty()) {
+            criteriaList.add("d.DOC_TITLE_C in :title");
+            parameterMap.put("title", criteria.getTitleList());
         }
-        if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
+        if (!criteria.getTagIdList().isEmpty()) {
             int index = 0;
             for (List<String> tagIdList : criteria.getTagIdList()) {
                 List<String> tagCriteriaList = Lists.newArrayList();
@@ -312,7 +320,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
                 criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
             }
         }
-        if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
+        if (!criteria.getExcludedTagIdList().isEmpty()) {
             int index = 0;
             for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
                 List<String> tagCriteriaList = Lists.newArrayList();
@@ -347,10 +355,8 @@ public class LuceneIndexingHandler implements IndexingHandler {
         criteriaList.add("d.DOC_DELETEDATE_D is null");
-        if (!criteriaList.isEmpty()) {
-            sb.append(" where ");
-            sb.append(Joiner.on(" and ").join(criteriaList));
-        }
+        sb.append(" where ");
+        sb.append(Joiner.on(" and ").join(criteriaList));
         // Perform the search
         QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
@@ -369,8 +375,6 @@ public class LuceneIndexingHandler implements IndexingHandler {
             documentDto.setFileId((String) o[i++]);
             Number shareCount = (Number) o[i++];
             documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
-            Number fileCount = (Number) o[i++];
-            documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
             documentDto.setActiveRoute(o[i++] != null);
             documentDto.setCurrentStepName((String) o[i++]);
             documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
@@ -408,14 +412,14 @@ public class LuceneIndexingHandler implements IndexingHandler {
     /**
      * Fulltext search in files and documents.
      *
-     * @param searchQuery Search query on metadatas
+     * @param simpleSearchQuery Search query on metadatas
      * @param fullSearchQuery Search query on all fields
      * @return Map of document IDs as key and highlight as value
      * @throws Exception e
      */
-    private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception {
+    private Map<String, String> search(String simpleSearchQuery, String fullSearchQuery) throws Exception {
         // The fulltext query searches in all fields
-        searchQuery = searchQuery + " " + fullSearchQuery;
+        String searchQuery = simpleSearchQuery + " " + fullSearchQuery;
         // Build search query
         Analyzer analyzer = new StandardAnalyzer();
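Besides the import cleanup, two functional changes sit in this file: the per-document file count is no longer aggregated inside the search SQL, and the title criterion now accepts several titles bound as a single in-list. A small sketch of the multi-title lookup with hypothetical titles (criteriaList and parameterMap are the builders from the surrounding method, and getTitleList() is assumed to return the list bound below):

    // Sketch only: several titles are matched by one query instead of one query per title.
    criteriaList.add("d.DOC_TITLE_C in :title");
    parameterMap.put("title", java.util.Arrays.asList("Invoice 2023-04", "Invoice 2023-05"));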

@@ -1,6 +1,6 @@
 package com.sismics.docs.core.util.jpa;
-import javax.persistence.Query;
+import jakarta.persistence.Query;
 import java.util.List;
 /**
@@ -68,7 +68,7 @@ public class PaginatedLists {
     }
     /**
-     * Executes a query and returns the data of the currunt page.
+     * Executes a query and returns the data of the current page.
      *
      * @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
      * @param queryParam Query parameters
@@ -82,18 +82,6 @@ public class PaginatedLists {
         q.setMaxResults(paginatedList.getLimit());
         return q.getResultList();
     }
-    /**
-     * Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
-     *
-     * @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
-     * @param queryParam Query parameters
-     * @return List of results
-     */
-    public static <E> List<Object[]> executePaginatedQuery(PaginatedList<E> paginatedList, QueryParam queryParam) {
-        executeCountQuery(paginatedList, queryParam);
-        return executeResultQuery(paginatedList, queryParam);
-    }
     /**
      * Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).

@@ -2,8 +2,8 @@ package com.sismics.docs.core.util.jpa;
 import java.util.Map.Entry;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
 import com.sismics.util.context.ThreadLocalContext;

@@ -17,9 +17,9 @@ import org.jsoup.Jsoup;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import javax.mail.Message;
 import javax.mail.MessagingException;
 import javax.mail.Multipart;
@@ -29,6 +29,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
@@ -87,12 +88,12 @@ public class EmailUtil {
         try {
             // Build email headers
             HtmlEmail email = new HtmlEmail();
-            email.setCharset("UTF-8");
+            email.setCharset(StandardCharsets.UTF_8.name());
             ConfigDao configDao = new ConfigDao();
             // Hostname
             String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV);
-            if (envHostname == null) {
+            if (Strings.isNullOrEmpty(envHostname)) {
                 email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME));
             } else {
                 email.setHostName(envHostname);
@@ -101,7 +102,7 @@ public class EmailUtil {
             // Port
             int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT);
             String envPort = System.getenv(Constants.SMTP_PORT_ENV);
-            if (envPort != null) {
+            if (!Strings.isNullOrEmpty(envPort)) {
                 port = Integer.valueOf(envPort);
             }
             email.setSmtpPort(port);
@@ -114,7 +115,7 @@ public class EmailUtil {
             // Username and password
             String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV);
             String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV);
-            if (envUsername == null || envPassword == null) {
+            if (Strings.isNullOrEmpty(envUsername) || Strings.isNullOrEmpty(envPassword)) {
                 Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME);
                 Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
                 if (usernameConfig != null && passwordConfig != null) {

@@ -1,6 +1,5 @@
 package com.sismics.util;
-import com.google.common.base.Charsets;
 import com.google.common.hash.Hashing;
 import javax.imageio.IIOImage;
@@ -13,6 +12,7 @@ import java.awt.image.BufferedImage;
 import java.awt.image.WritableRaster;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 /**
@@ -80,7 +80,7 @@ public class ImageUtil {
         }
         return Hashing.md5().hashString(
-                email.trim().toLowerCase(), Charsets.UTF_8)
+                email.trim().toLowerCase(), StandardCharsets.UTF_8)
                 .toString();
     }

@@ -1,7 +1,7 @@
 package com.sismics.util;
-import javax.json.Json;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
 /**
  * JSON utilities.

@@ -8,6 +8,7 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.*;
 import java.util.jar.JarEntry;
@@ -53,7 +54,7 @@ public class ResourceUtil {
         // Extract the JAR path
         String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!"));
-        JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
+        JarFile jar = new JarFile(URLDecoder.decode(jarPath, StandardCharsets.UTF_8));
         Set<String> fileSet = new HashSet<String>();
         try {

@@ -3,7 +3,7 @@ package com.sismics.util.context;
 import com.google.common.collect.Lists;
 import com.sismics.docs.core.model.context.AppContext;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 import java.util.Iterator;
 import java.util.List;

@@ -39,7 +39,7 @@ abstract class DbOpenHelper {
     private static final Logger log = LoggerFactory.getLogger(DbOpenHelper.class);
     private final JdbcConnectionAccess jdbcConnectionAccess;
     private final List<Exception> exceptions = new ArrayList<>();
     private Formatter formatter;
@@ -99,7 +99,7 @@ abstract class DbOpenHelper {
             onCreate();
             oldVersion = 0;
         }
         // Execute update script
         ResourceBundle configBundle = ConfigUtil.getConfigBundle();
         Integer currentVersion = Integer.parseInt(configBundle.getString("db.version"));
@@ -126,7 +126,7 @@ abstract class DbOpenHelper {
     /**
      * Execute all upgrade scripts in ascending order for a given version.
      *
      * @param version Version number
      * @throws Exception e
      */
@@ -136,7 +136,7 @@ abstract class DbOpenHelper {
             return name.matches("dbupdate-" + versionString + "-\\d+\\.sql");
         });
         Collections.sort(fileNameList);
         for (String fileName : fileNameList) {
             if (log.isInfoEnabled()) {
                 log.info(MessageFormat.format("Executing script: {0}", fileName));
@@ -145,16 +145,16 @@ abstract class DbOpenHelper {
             executeScript(is);
         }
     }
     /**
      * Execute a SQL script. All statements must be one line only.
      *
      * @param inputScript Script to execute
      * @throws IOException e
      */
     private void executeScript(InputStream inputScript) throws IOException {
         List<String> lines = CharStreams.readLines(new InputStreamReader(inputScript));
         for (String sql : lines) {
             if (Strings.isNullOrEmpty(sql) || sql.startsWith("--")) {
                 continue;
@@ -178,13 +178,13 @@ abstract class DbOpenHelper {
     }
     public abstract void onCreate() throws Exception;
     public abstract void onUpgrade(int oldVersion, int newVersion) throws Exception;
     /**
-     * Returns a List of all Exceptions which occured during the export.
+     * Returns a List of all Exceptions which occurred during the export.
      *
-     * @return A List containig the Exceptions occured during the export
+     * @return A List containing the Exceptions occurred during the export
      */
     public List<?> getExceptions() {
         return exceptions;
@@ -192,7 +192,7 @@ abstract class DbOpenHelper {
     /**
      * Format the output SQL statements.
      *
      * @param format True to format
      */
     public void setFormat(boolean format) {
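The upgrade loop above discovers migration scripts purely by file name, runs them in sorted order, and executes one SQL statement per line while skipping lines starting with "--". A small sketch of the naming check, assuming a three-digit, zero-padded version segment (e.g. dbupdate-028-0.sql, a hypothetical name):

    // Hypothetical file name; the regex is the one used by the filter shown above.
    String versionString = String.format("%03d", 28);   // assumed zero-padded version formatting
    boolean isUpgradeScript = "dbupdate-028-0.sql".matches("dbupdate-" + versionString + "-\\d+\\.sql");
    // isUpgradeScript == true; matching scripts are then executed in Collections.sort() order.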

@@ -1,5 +1,6 @@
 package com.sismics.util.jpa;
+import com.google.common.base.Strings;
 import com.sismics.docs.core.util.DirectoryUtil;
 import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
 import org.hibernate.internal.util.config.ConfigurationHelper;
@@ -7,8 +8,8 @@ import org.hibernate.service.ServiceRegistry;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.Persistence;
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.Persistence;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
@@ -25,7 +26,7 @@ import java.util.Properties;
 public final class EMF {
     private static final Logger log = LoggerFactory.getLogger(EMF.class);
-    private static Map<Object, Object> properties;
+    private static Properties properties;
     private static EntityManagerFactory emfInstance;
@@ -58,7 +59,7 @@ public final class EMF {
         }
     }
-    private static Map<Object, Object> getEntityManagerProperties() {
+    private static Properties getEntityManagerProperties() {
         // Use properties file if exists
         try {
             URL hibernatePropertiesUrl = EMF.class.getResource("/hibernate.properties");
@@ -78,19 +79,24 @@ public final class EMF {
         String databaseUrl = System.getenv("DATABASE_URL");
         String databaseUsername = System.getenv("DATABASE_USER");
         String databasePassword = System.getenv("DATABASE_PASSWORD");
+        String databasePoolSize = System.getenv("DATABASE_POOL_SIZE");
+        if(databasePoolSize == null) {
+            databasePoolSize = "10";
+        }
         log.info("Configuring EntityManager from environment parameters");
-        Map<Object, Object> props = new HashMap<>();
+        Properties props = new Properties();
         Path dbDirectory = DirectoryUtil.getDbDirectory();
         String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
-        if (databaseUrl == null) {
+        if (Strings.isNullOrEmpty(databaseUrl)) {
+            log.warn("Using an embedded H2 database. Only suitable for testing purpose, not for production!");
             props.put("hibernate.connection.driver_class", "org.h2.Driver");
             props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
             props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");
             props.put("hibernate.connection.username", "sa");
         } else {
             props.put("hibernate.connection.driver_class", "org.postgresql.Driver");
-            props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQL94Dialect");
+            props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
             props.put("hibernate.connection.url", databaseUrl);
             props.put("hibernate.connection.username", databaseUsername);
             props.put("hibernate.connection.password", databasePassword);
@@ -100,12 +106,9 @@ public final class EMF {
         props.put("hibernate.format_sql", "false");
         props.put("hibernate.max_fetch_depth", "5");
         props.put("hibernate.cache.use_second_level_cache", "false");
-        props.put("hibernate.c3p0.min_size", "1");
-        props.put("hibernate.c3p0.max_size", "10");
-        props.put("hibernate.c3p0.timeout", "5000");
-        props.put("hibernate.c3p0.max_statements", "0");
-        props.put("hibernate.c3p0.acquire_increment", "1");
-        props.put("hibernate.c3p0.idle_test_period", "10");
+        props.put("hibernate.connection.initial_pool_size", "1");
+        props.put("hibernate.connection.pool_size", databasePoolSize);
+        props.put("hibernate.connection.pool_validation_interval", "5");
         return props;
     }
@@ -137,4 +140,4 @@ public final class EMF {
     public static String getDriver() {
         return (String) properties.get("hibernate.connection.driver_class");
     }
 }
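With c3p0 gone, the built-in Hibernate pool is sized from the environment, so deployments can raise the connection count without rebuilding anything. A minimal sketch of the resolution logic introduced above (the docker-style invocation in the comment is only an assumed example):

    // e.g. docker run -e DATABASE_POOL_SIZE=25 ... (assumed deployment example)
    String databasePoolSize = System.getenv("DATABASE_POOL_SIZE");
    if (databasePoolSize == null) {
        databasePoolSize = "10"; // default used when the variable is not set
    }
    props.put("hibernate.connection.pool_size", databasePoolSize); // replaces the hibernate.c3p0.* settings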

@@ -1,6 +1,6 @@
 package com.sismics.util.log4j;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 /**

@@ -13,7 +13,7 @@ public class MimeType {
     public static final String IMAGE_GIF = "image/gif";
     public static final String APPLICATION_ZIP = "application/zip";
     public static final String APPLICATION_PDF = "application/pdf";
     public static final String OPEN_DOCUMENT_TEXT = "application/vnd.oasis.opendocument.text";

@@ -1,15 +1,9 @@
 package com.sismics.util.mime;
-import com.google.common.base.Charsets;
-import org.apache.commons.compress.utils.IOUtils;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
+import java.net.URLConnection;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
 /**
  * Utility to check MIME types.
@@ -18,7 +12,7 @@ import java.util.zip.ZipInputStream;
  */
 public class MimeTypeUtil {
     /**
-     * Try to guess the MIME type of a file by its magic number (header).
+     * Try to guess the MIME type of a file.
      *
      * @param file File to inspect
      * @param name File name
@@ -26,57 +20,17 @@ public class MimeTypeUtil {
      * @throws IOException e
      */
     public static String guessMimeType(Path file, String name) throws IOException {
-        String mimeType;
-        try (InputStream is = Files.newInputStream(file)) {
-            byte[] headerBytes = new byte[64];
-            is.read(headerBytes);
-            mimeType = guessMimeType(headerBytes, name);
+        String mimeType = Files.probeContentType(file);
+        if (mimeType == null && name != null) {
+            mimeType = URLConnection.getFileNameMap().getContentTypeFor(name);
         }
-        return guessOpenDocumentFormat(mimeType, file);
-    }
-    /**
-     * Try to guess the MIME type of a file by its magic number (header).
-     *
-     * @param headerBytes File header (first bytes)
-     * @param name File name
-     * @return MIME type
-     * @throws UnsupportedEncodingException e
-     */
-    public static String guessMimeType(byte[] headerBytes, String name) throws UnsupportedEncodingException {
-        String header = new String(headerBytes, "US-ASCII");
-        // Detect by header bytes
-        if (header.startsWith("PK")) {
-            return MimeType.APPLICATION_ZIP;
-        } else if (header.startsWith("GIF87a") || header.startsWith("GIF89a")) {
-            return MimeType.IMAGE_GIF;
-        } else if (headerBytes[0] == ((byte) 0xff) && headerBytes[1] == ((byte) 0xd8)) {
-            return MimeType.IMAGE_JPEG;
-        } else if (headerBytes[0] == ((byte) 0x89) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x4e) && headerBytes[3] == ((byte) 0x47) &&
-                headerBytes[4] == ((byte) 0x0d) && headerBytes[5] == ((byte) 0x0a) && headerBytes[6] == ((byte) 0x1a) && headerBytes[7] == ((byte) 0x0a)) {
-            return MimeType.IMAGE_PNG;
-        } else if (headerBytes[0] == ((byte) 0x25) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x44) && headerBytes[3] == ((byte) 0x46)) {
-            return MimeType.APPLICATION_PDF;
-        } else if (headerBytes[0] == ((byte) 0x00) && headerBytes[1] == ((byte) 0x00) && headerBytes[2] == ((byte) 0x00)
-                && (headerBytes[3] == ((byte) 0x14) || headerBytes[3] == ((byte) 0x18) || headerBytes[3] == ((byte) 0x20))
-                && headerBytes[4] == ((byte) 0x66) && headerBytes[5] == ((byte) 0x74) && headerBytes[6] == ((byte) 0x79) && headerBytes[7] == ((byte) 0x70)) {
-            return MimeType.VIDEO_MP4;
-        } else if (headerBytes[0] == ((byte) 0x1a) && headerBytes[1] == ((byte) 0x45) && headerBytes[2] == ((byte) 0xdf) && headerBytes[3] == ((byte) 0xa3)) {
-            return MimeType.VIDEO_WEBM;
+        if (mimeType == null) {
+            return MimeType.DEFAULT;
         }
-        // Detect by file extension
-        if (name != null) {
-            if (name.endsWith(".txt")) {
-                return MimeType.TEXT_PLAIN;
-            } else if (name.endsWith(".csv")) {
-                return MimeType.TEXT_CSV;
-            }
-        }
-        return MimeType.DEFAULT;
+        return mimeType;
     }
     /**
@@ -113,52 +67,4 @@ public class MimeTypeUtil {
             return "bin";
         }
     }
-    /**
-     * Guess the MIME type of open document formats (docx and odt).
-     * It's more costly than the simple header check, but needed because open document formats
-     * are simple ZIP files on the outside and much bigger on the inside.
-     *
-     * @param mimeType Currently detected MIME type
-     * @param file File on disk
-     * @return MIME type
-     */
-    private static String guessOpenDocumentFormat(String mimeType, Path file) {
-        if (!MimeType.APPLICATION_ZIP.equals(mimeType)) {
-            // open document formats are ZIP files
-            return mimeType;
-        }
-        try (InputStream inputStream = Files.newInputStream(file);
-             ZipInputStream zipInputStream = new ZipInputStream(inputStream, Charsets.ISO_8859_1)) {
-            ZipEntry archiveEntry = zipInputStream.getNextEntry();
-            while (archiveEntry != null) {
-                if (archiveEntry.getName().equals("mimetype")) {
-                    // Maybe it's an ODT file
-                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
-                    if (MimeType.OPEN_DOCUMENT_TEXT.equals(content.trim())) {
-                        mimeType = MimeType.OPEN_DOCUMENT_TEXT;
-                        break;
-                    }
-                } else if (archiveEntry.getName().equals("[Content_Types].xml")) {
-                    // Maybe it's a DOCX file
-                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
-                    if (content.contains(MimeType.OFFICE_DOCUMENT)) {
-                        mimeType = MimeType.OFFICE_DOCUMENT;
-                        break;
-                    } else if (content.contains(MimeType.OFFICE_PRESENTATION)) {
-                        mimeType = MimeType.OFFICE_PRESENTATION;
-                        break;
-                    }
-                }
-                archiveEntry = zipInputStream.getNextEntry();
-            }
-        } catch (Exception e) {
-            // In case of any error, just give up and keep the ZIP MIME type
-            return mimeType;
-        }
-        return mimeType;
-    }
 }
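Detection now relies on the JDK instead of hand-rolled magic-number checks: Files.probeContentType() first, the URLConnection file-name map as a fallback, and MimeType.DEFAULT when both come back empty. A minimal usage sketch (the file name is hypothetical, java.nio.file imports omitted, and MimeType.DEFAULT is assumed to be a generic binary type such as application/octet-stream):

    Path file = Paths.get("invoice.pdf");   // hypothetical input
    String mimeType = MimeTypeUtil.guessMimeType(file, file.getFileName().toString());
    // Typically "application/pdf" here; unrecognised content falls back to MimeType.DEFAULT.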

@@ -1,8 +1,8 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<persistence xmlns="http://java.sun.com/xml/ns/persistence"
+<persistence xmlns="https://jakarta.ee/xml/ns/persistence"
              xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-             xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd"
-             version="2.0">
+             xsi:schemaLocation="https://jakarta.ee/xml/ns/persistence https://jakarta.ee/xml/ns/persistence/persistence_3_0.xsd"
+             version="3.0">
     <persistence-unit name="transactions-optional" transaction-type="RESOURCE_LOCAL">
         <provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
     </persistence-unit>

@@ -1 +1 @@
-db.version=27
+db.version=31

@@ -0,0 +1,2 @@
+insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_STARTTLS', 'false');
+update T_CONFIG set CFG_VALUE_C = '28' where CFG_ID_C = 'DB_VERSION';

@@ -0,0 +1,2 @@
+alter table T_FILE add column FIL_SIZE_N bigint not null default -1;
+update T_CONFIG set CFG_VALUE_C = '29' where CFG_ID_C = 'DB_VERSION';

Some files were not shown because too many files have changed in this diff.