Compare commits


No commits in common. "master" and "v1.7" have entirely different histories.
master ... v1.7

293 changed files with 3213 additions and 10399 deletions

.github/FUNDING.yml vendored (3 changed lines)

@ -1,3 +0,0 @@
# These are supported funding model platforms
github: [jendib]

.github/workflows/build-deploy.yml

@ -1,84 +0,0 @@
name: Maven CI/CD
on:
push:
branches: [master]
tags: [v*]
workflow_dispatch:
jobs:
build_and_publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v2
with:
java-version: "11"
distribution: "temurin"
cache: maven
- name: Install test dependencies
run: sudo apt-get update && sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
- name: Build with Maven
run: mvn --batch-mode -Pprod clean install
- name: Upload war artifact
uses: actions/upload-artifact@v2
with:
name: docs-web-ci.war
path: docs-web/target/docs*.war
build_docker_image:
name: Publish to Docker Hub
runs-on: ubuntu-latest
needs: [build_and_publish]
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Download war artifact
uses: actions/download-artifact@v2
with:
name: docs-web-ci.war
path: docs-web/target
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Populate Docker metadata
id: metadata
uses: docker/metadata-action@v3
with:
images: sismics/docs
flavor: |
latest=false
tags: |
type=ref,event=tag
type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
labels: |
org.opencontainers.image.title = Teedy
org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
org.opencontainers.image.created = ${{ github.event_created_at }}
org.opencontainers.image.author = Sismics
org.opencontainers.image.url = https://teedy.io/
org.opencontainers.image.vendor = Sismics
org.opencontainers.image.license = GPLv2
org.opencontainers.image.version = ${{ github.event_head_commit.id }}
-
name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}

.gitignore vendored (11 changed lines)

@ -11,11 +11,6 @@
*.iml
node_modules
import_test
teedy-importer-linux
teedy-importer-macos
teedy-importer-win.exe
docs/*
!docs/.gitkeep
#macos
.DS_Store
docs-importer-linux
docs-importer-macos
docs-importer-win.exe

.travis.yml (new file, 26 lines)

@ -0,0 +1,26 @@
sudo: required
dist: trusty
language: java
before_install:
- sudo add-apt-repository -y ppa:mc3man/trusty-media
- sudo apt-get -qq update
- sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb
- sudo apt-get -y -q install haveged && sudo service haveged start
after_success:
- |
if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
mvn -Pprod -DskipTests clean install
docker login -u $DOCKER_USER -p $DOCKER_PASS
export REPO=sismics/docs
export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
docker build -f Dockerfile -t $REPO:$COMMIT .
docker tag $REPO:$COMMIT $REPO:$TAG
docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
docker push $REPO
fi
env:
global:
- secure: LRGpjWORb0qy6VuypZjTAfA8uRHlFUMTwb77cenS9PPRBxuSnctC531asS9Xg3DqC5nsRxBBprgfCKotn5S8nBSD1ceHh84NASyzLSBft3xSMbg7f/2i7MQ+pGVwLncusBU6E/drnMFwZBleo+9M8Tf96axY5zuUp90MUTpSgt0=
- secure: bCDDR6+I7PmSkuTYZv1HF/z98ANX/SFEESUCqxVmV5Gs0zFC0vQXaPJQ2xaJNRop1HZBFMZLeMMPleb0iOs985smpvK2F6Rbop9Tu+Vyo0uKqv9tbZ7F8Nfgnv9suHKZlL84FNeUQZJX6vsFIYPEJ/r7K5P/M0PdUy++fEwxEhU=
- secure: ewXnzbkgCIHpDWtaWGMa1OYZJ/ki99zcIl4jcDPIC0eB3njX/WgfcC6i0Ke9mLqDqwXarWJ6helm22sNh+xtQiz6isfBtBX+novfRt9AANrBe3koCMUemMDy7oh5VflBaFNP0DVb8LSCnwf6dx6ZB5E9EB8knvk40quc/cXpGjY=
- COMMIT=${TRAVIS_COMMIT::8}

Dockerfile

@ -1,75 +1,11 @@
FROM ubuntu:22.04
LABEL maintainer="b.gamard@sismics.com"
FROM sismics/ubuntu-jetty:9.4.12
MAINTAINER b.gamard@sismics.com
# Run Debian in non interactive mode
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb && \
apt-get clean && rm -rf /var/lib/apt/lists/*
# Configure env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV JAVA_HOME /usr/lib/jvm/java-11-openjdk-amd64/
ENV JAVA_OPTIONS -Dfile.encoding=UTF-8 -Xmx1g
ENV JETTY_VERSION 11.0.20
ENV JETTY_HOME /opt/jetty
# Remove the embedded javax.mail jar from Jetty
RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
# Install packages
RUN apt-get update && \
apt-get -y -q --no-install-recommends install \
vim less procps unzip wget tzdata openjdk-11-jdk \
ffmpeg \
mediainfo \
tesseract-ocr \
tesseract-ocr-ara \
tesseract-ocr-ces \
tesseract-ocr-chi-sim \
tesseract-ocr-chi-tra \
tesseract-ocr-dan \
tesseract-ocr-deu \
tesseract-ocr-fin \
tesseract-ocr-fra \
tesseract-ocr-heb \
tesseract-ocr-hin \
tesseract-ocr-hun \
tesseract-ocr-ita \
tesseract-ocr-jpn \
tesseract-ocr-kor \
tesseract-ocr-lav \
tesseract-ocr-nld \
tesseract-ocr-nor \
tesseract-ocr-pol \
tesseract-ocr-por \
tesseract-ocr-rus \
tesseract-ocr-spa \
tesseract-ocr-swe \
tesseract-ocr-tha \
tesseract-ocr-tur \
tesseract-ocr-ukr \
tesseract-ocr-vie \
tesseract-ocr-sqi \
&& apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN dpkg-reconfigure -f noninteractive tzdata
# Install Jetty
RUN wget -nv -O /tmp/jetty.tar.gz \
"https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-home/${JETTY_VERSION}/jetty-home-${JETTY_VERSION}.tar.gz" \
&& tar xzf /tmp/jetty.tar.gz -C /opt \
&& mv /opt/jetty* /opt/jetty \
&& useradd jetty -U -s /bin/false \
&& chown -R jetty:jetty /opt/jetty \
&& mkdir /opt/jetty/webapps \
&& chmod +x /opt/jetty/bin/jetty.sh
EXPOSE 8080
# Install app
RUN mkdir /app && \
cd /app && \
java -jar /opt/jetty/start.jar --add-modules=server,http,webapp,deploy
ADD docs.xml /app/webapps/docs.xml
ADD docs-web/target/docs-web-*.war /app/webapps/docs.war
WORKDIR /app
CMD ["java", "-jar", "/opt/jetty/start.jar"]
ADD docs.xml /opt/jetty/webapps/docs.xml
ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
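For reference, the image described by this Dockerfile can be built locally much like the `.travis.yml` job above does; a minimal sketch, assuming Maven and Docker are installed (the `local` tag is only an example):

```console
# Build the WAR with the production profile, then build the image from the repository root
mvn -Pprod -DskipTests clean install
docker build -f Dockerfile -t sismics/docs:local .
```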

README.md (196 changed lines)

@ -2,32 +2,35 @@
<img src="https://teedy.io/img/github-title.png" alt="Teedy" width=500 />
</h3>
[![Twitter: @teedyio](https://img.shields.io/badge/contact-@teedyio-blue.svg?style=flat)](https://twitter.com/teedyio)
[![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
[![Maven CI/CD](https://github.com/sismics/docs/actions/workflows/build-deploy.yml/badge.svg)](https://github.com/sismics/docs/actions/workflows/build-deploy.yml)
[![Build Status](https://secure.travis-ci.org/sismics/docs.png)](http://travis-ci.org/sismics/docs)
Teedy is an open source, lightweight document management system for individuals and businesses.
**Discuss it on [Product Hunt](https://www.producthunt.com/posts/sismics-docs) 🦄**
<hr />
<h2 align="center">
<a href="https://github.com/users/jendib/sponsorship">Sponsor this project if you use and appreciate it!</a>
Sismics Docs is now called Teedy! You can still find our cloud and support offer on <a href="https://teedy.io">teedy.io</a>
</h2>
<hr />
![New!](https://teedy.io/img/laptop-demo.png?20180301)
# Demo
Demo
----
A demo is available at [demo.teedy.io](https://demo.teedy.io)
- Guest login is enabled with read access on all documents
- "admin" login with "admin" password
- "demo" login with "password" password
# Features
Features
--------
- Responsive user interface
- Optical character recognition
- LDAP authentication ![New!](https://www.sismics.com/public/img/new.png)
- Support for image, PDF, ODT, DOCX, PPTX files
- Video file support
- Flexible search engine with suggestions and highlighting
@ -53,194 +56,87 @@ A demo is available at [demo.teedy.io](https://demo.teedy.io)
- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
- Tested to one million documents
# Install with Docker
Install with Docker
-------------------
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. If no PostgreSQL configuration is provided, an embedded H2 database is used; H2 should only be used for testing. For production use, use the provided PostgreSQL configuration (see the Docker Compose example below).
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database, but PostgreSQL is also supported for better performance.
**The default admin password is "admin". Don't forget to change it before going to production.**
- Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
- Latest stable version: `sismics/docs:v1.11`
- Latest stable version: `sismics/docs:v1.7`
The data directory is `/data`. Don't forget to mount a volume on it.
To build external URLs, the server expects a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com).
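For example, a minimal run of the published image with the data volume and base URL set might look like this (the host path and URL are placeholders):

```console
docker run -d --name teedy \
  -p 8080:8080 \
  -e DOCS_BASE_URL=https://teedy.mycompany.com \
  -v /srv/teedy/data:/data \
  sismics/docs:v1.11
```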
## Available environment variables
Manual installation
-------------------
- General
- `DOCS_BASE_URL`: The base URL used by the application. Generated URLs will use it as their base.
- `DOCS_GLOBAL_QUOTA`: Defines the default quota that applies to all users.
- `DOCS_BCRYPT_WORK`: Defines the work factor used for password hashing. The default is `10`. The value must be between `4` and `31` (inclusive). The specified value is used for all new users and for users changing their password. Be aware that setting this factor too high can heavily impact login and user creation performance.
- Admin
- `DOCS_ADMIN_EMAIL_INIT`: Defines the e-mail address the admin user has upon initialization.
- `DOCS_ADMIN_PASSWORD_INIT`: Defines the password the admin user has upon initialization. It must be a bcrypt hash (see the example after this list). **Be aware that `$` within the hash has to be escaped with a second `$`.**
- Database
- `DATABASE_URL`: The JDBC connection string used by Hibernate.
- `DATABASE_USER`: The user used for the database connection.
- `DATABASE_PASSWORD`: The password used for the database connection.
- `DATABASE_POOL_SIZE`: The pool size used for the database connection.
- Language
- `DOCS_DEFAULT_LANGUAGE`: The language used as the default. Currently supported values are:
- `eng`, `fra`, `ita`, `deu`, `spa`, `por`, `pol`, `rus`, `ukr`, `ara`, `hin`, `chi_sim`, `chi_tra`, `jpn`, `tha`, `kor`, `nld`, `tur`, `heb`, `hun`, `fin`, `swe`, `lav`, `dan`
- E-Mail
- `DOCS_SMTP_HOSTNAME`: Hostname of the SMTP server used by Teedy.
- `DOCS_SMTP_PORT`: The SMTP port to use.
- `DOCS_SMTP_USERNAME`: The SMTP username to use.
- `DOCS_SMTP_PASSWORD`: The SMTP password to use.
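`DOCS_ADMIN_PASSWORD_INIT` above expects a bcrypt hash rather than a plain password. A minimal sketch of generating one, assuming `htpasswd` (from `apache2-utils`) is available; the second command doubles the `$` characters for use in a Docker Compose file:

```console
# Generate a bcrypt hash (work factor 10) for the password "superSecure";
# htpasswd prints "user:hash", so strip the leading colon and the newline
htpasswd -bnBC 10 "" superSecure | tr -d ':\n'

# Same hash with every "$" doubled, ready to paste into docker-compose.yml
htpasswd -bnBC 10 "" superSecure | tr -d ':\n' | sed 's/\$/$$/g'
```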
## Examples
In the following examples, some passwords are exposed in cleartext to keep the examples simple. We strongly encourage you to use variables with an `.env` file, or other means, to store your passwords securely.
### Default, using PostgreSQL
```yaml
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.11
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
# Setup the database connection. "teedy-db" is the hostname
# and "teedy" is the name of the database the application
# will connect to.
DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
DATABASE_USER: "teedy_db_user"
DATABASE_PASSWORD: "teedy_db_password"
DATABASE_POOL_SIZE: "10"
volumes:
- ./docs/data:/data
networks:
- docker-internal
- internet
depends_on:
- teedy-db
# DB for Teedy
teedy-db:
image: postgres:13.1-alpine
restart: unless-stopped
expose:
- 5432
environment:
POSTGRES_USER: "teedy_db_user"
POSTGRES_PASSWORD: "teedy_db_password"
POSTGRES_DB: "teedy"
volumes:
- ./docs/db:/var/lib/postgresql/data
networks:
- docker-internal
networks:
# Network without internet access. The db does not need
# access to the host network.
docker-internal:
driver: bridge
internal: true
internet:
driver: bridge
```
### Using the internal database (only for testing)
```yaml
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.11
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
volumes:
- ./docs/data:/data
```
# Manual installation
## Requirements
- Java 11
- Tesseract 4 for OCR
#### Requirements
- Java 8 with the [Java Cryptography Extension](http://www.oracle.com/technetwork/java/javase/downloads/jce-7-download-432124.html)
- Tesseract 3 or 4 for OCR
- ffmpeg for video thumbnails
- mediainfo for video metadata extraction
- A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
## Download
#### Download
The latest release, packaged as a WAR file, is downloadable here: <https://github.com/sismics/docs/releases>.
**The default admin password is "admin". Don't forget to change it before going to production.**
## How to build Teedy from the sources
How to build Teedy from the sources
----------------------------------
Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
Prerequisites: JDK 8 with JCE, Maven 3, Tesseract 3 or 4
Teedy is organized in several Maven modules:
- docs-core
- docs-web
- docs-web-common
- docs-core
- docs-web
- docs-web-common
First off, clone the repository: `git clone git://github.com/sismics/docs.git`
or download the sources from GitHub.
### Launch the build
#### Launch the build
From the root directory:
```console
mvn clean -DskipTests install
```
mvn clean -DskipTests install
### Run a stand-alone version
#### Run a stand-alone version
From the `docs-web` directory:
```console
mvn jetty:run
```
mvn jetty:run
### Build a .war to deploy to your servlet container
#### Build a .war to deploy to your servlet container
From the `docs-web` directory:
```console
mvn -Pprod -DskipTests clean install
```
mvn -Pprod -DskipTests clean install
You will get your deployable WAR in the `docs-web/target` directory.
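As a rough sketch, deploying that WAR to a stand-alone Jetty mirrors what the project's Dockerfile does; the `/opt/jetty` path is an assumption about your local install:

```console
# Copy the built WAR into Jetty's webapps directory and start the server
cp docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
cd /opt/jetty && java -jar start.jar
```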
# Contributing
Contributing
------------
All contributions are more than welcome. A contribution may close an issue, fix a bug (reported or not), improve the existing code, add a new feature, and so on.
The `master` branch is the default and base branch for the project. It is used for development, and all pull requests should target it.
# License
Community
---------
Get updates on Teedy's development and chat with the project maintainers:
- Follow [@teedyio on Twitter](https://twitter.com/teedyio)
- Read and subscribe to [The Official Teedy Blog](https://blog.teedy.io/)
- Check the [Official Website](https://teedy.io)
- Join us [on Facebook](https://www.facebook.com/teedyio)
License
-------
Teedy is released under the terms of the GPL license. See `COPYING` for more
information or see <http://opensource.org/licenses/GPL-2.0>.


@ -1,18 +0,0 @@
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.10
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
volumes:
- ./docs/data:/data

LanguageAdapter.java

@ -34,7 +34,6 @@ public class LanguageAdapter extends BaseAdapter {
languageList.add(new Language("fra", R.string.language_french, R.drawable.fra));
languageList.add(new Language("eng", R.string.language_english, R.drawable.eng));
languageList.add(new Language("deu", R.string.language_german, R.drawable.deu));
languageList.add(new Language("pol", R.string.language_polish, R.drawable.pol));
}
@Override

SearchQueryBuilder.java

@ -39,9 +39,7 @@ public class SearchQueryBuilder {
*/
public SearchQueryBuilder simpleSearch(String simpleSearch) {
if (isValid(simpleSearch)) {
query.append(SEARCH_SEPARATOR)
.append("simple:")
.append(simpleSearch);
query.append(SEARCH_SEPARATOR).append(simpleSearch);
}
return this;
}

Binary file not shown (before: 238 B).

values-pl/strings.xml

@ -1,164 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Validation -->
<string name="validate_error_email">Nieprawidłowy email</string>
<string name="validate_error_length_min">Za krótki (min. %d)</string>
<string name="validate_error_length_max">Za długi (max. %d)</string>
<string name="validate_error_required">Wymagany</string>
<string name="validate_error_alphanumeric">Tylko litery i cyfry</string>
<!-- App -->
<string name="app_name" translatable="false">Teedy</string>
<string name="drawer_open">Otwórz szufladę nawigacji</string>
<string name="drawer_close">Zamknij szufladę nawigacji</string>
<string name="login_explain"><![CDATA[Aby rozpocząć, musisz pobrać i zainstalować serwer Teedy na <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> i poniżej wprowadzić adres]]></string>
<string name="server">Serwer</string>
<string name="username">Użytkownik</string>
<string name="password">Hasło</string>
<string name="login">Zaloguj</string>
<string name="ok">OK</string>
<string name="cancel">Anuluj</string>
<string name="login_fail_title">Błąd logowania</string>
<string name="login_fail">Nieprawidłowa nazwa użytkownika lub hasło</string>
<string name="network_error_title">Błąd sieci</string>
<string name="network_error">Błąd sieci, sprawdź połączenie z interneterm oraz adres URL serwera</string>
<string name="invalid_url_title">Nieprawidłowy adres URL</string>
<string name="invalid_url">Sprawdź adres URL serwera i spróbuj ponownie</string>
<string name="crash_toast_text">Wystąpiła awaria, wysłano raport w celu rozwiązania tego problemu</string>
<string name="created_date">Data utworzenia</string>
<string name="download_file">Pobierz bieżący plik</string>
<string name="download_document">Pobierz</string>
<string name="action_search">Znadź dokumenty</string>
<string name="all_documents">Wszystkie dokumenty</string>
<string name="shared_documents">Udostępnione dokumenty</string>
<string name="all_tags">Wszystkie etykiety</string>
<string name="no_tags">Brak etykiet</string>
<string name="error_loading_tags">Błąd ładowania etykiet</string>
<string name="no_documents">Brak dokumentów</string>
<string name="error_loading_documents">Błąd ładowania dokumentów</string>
<string name="no_files">Brak plików</string>
<string name="error_loading_files">Błąd ładowania plików</string>
<string name="new_document">Nowy dokument</string>
<string name="share">Udostępnij</string>
<string name="close">Zamknij</string>
<string name="add">Dodaj</string>
<string name="add_share_hint">Nazwa udostępnienia (opcjonalnie)</string>
<string name="document_not_shared">Ten dokument nie jest obecnie udostępniony</string>
<string name="delete_share">Usuń udostępnienie</string>
<string name="send_share">Wyślij link udostępnienia</string>
<string name="error_loading_shares">Błąd ładowania udostępnień</string>
<string name="error_adding_share">Błąd dodawania udostępnienia</string>
<string name="share_default_name">Udostępnij link</string>
<string name="error_deleting_share">Błąd usuwania udostępnienia</string>
<string name="send_share_to">Wyślij link udostępnienia do</string>
<string name="upload_file">dodaj plik</string>
<string name="upload_from">Przeslij plik z</string>
<string name="settings">ustawienia</string>
<string name="logout">Wyloguj</string>
<string name="version">Wersja</string>
<string name="build">Kompilacja</string>
<string name="pref_advanced_category">Ustawienia zaawansowane</string>
<string name="pref_about_category">O programie</string>
<string name="pref_github">GitHub</string>
<string name="pref_issue">Zgłoś błąd</string>
<string name="pref_clear_cache_title">Wyczyść cache</string>
<string name="pref_clear_cache_summary">Wyczyść podręczne pliki</string>
<string name="pref_clear_cache_success">Cache wyczyszczony</string>
<string name="pref_clear_history_title">Wyczyść historię wyszukiwania</string>
<string name="pref_clear_history_summary">Opróżnij ostatnie sugestie wyszukiwania</string>
<string name="pref_clear_history_success">Historia wyszukiwania wyczyszczona</string>
<string name="pref_cache_size">Rozmiar cache</string>
<string name="language_french" translatable="false">Francuski</string>
<string name="language_english" translatable="false">Angielski</string>
<string name="language_german" translatable="false">Niemiecki</string>
<string name="language_polish" translatable="false">Polski</string>
<string name="save">Zapisz</string>
<string name="edit_document">Edytuj</string>
<string name="error_editing_document">Błąd sieci, spróbuj ponownie</string>
<string name="please_wait">Proszę czekać</string>
<string name="document_editing_message">Wysyłam twoje dane</string>
<string name="delete_document">Usuń</string>
<string name="delete_document_title">Usuń dokument</string>
<string name="delete_document_message">Naprawdę chcesz usunąć dokument i powiązane z nim pliki?</string>
<string name="document_delete_failure">Błąd sieci w czasie usuwania tego dokumentu</string>
<string name="document_deleting_message">Usuwanie dokumentu</string>
<string name="delete_file_title">Usuń plik</string>
<string name="delete_file_message">Naprawdę chcesz usunąć ten plik?</string>
<string name="file_delete_failure">Błąd sieci w czasie usuwania bieżącego pliku</string>
<string name="file_deleting_message">Usuwanie pliku</string>
<string name="error_reading_file">Błąd podczas odczytu pliku</string>
<string name="upload_notification_title">Teedy</string>
<string name="upload_notification_message">Przesyłanie nowego pliku do dokumentu</string>
<string name="upload_notification_error">Błąd przsyłania nowego pliku</string>
<string name="delete_file">Usuń bieżący plik</string>
<string name="advanced_search">Zaawansowane wyszukiwanie</string>
<string name="search">Znajdź</string>
<string name="add_tags">Dodaj eytkiety</string>
<string name="creation_date">Data utworzenia</string>
<string name="description">Opis</string>
<string name="title">Tytuł</string>
<string name="simple_search">Proste wyszukiwanie</string>
<string name="fulltext_search">Wyszukiwanie pełnotekstowe</string>
<string name="creator">Autor</string>
<string name="after_date">Po dacie</string>
<string name="before_date">Przed datą</string>
<string name="search_tags">Znajdź etykiety</string>
<string name="all_languages">Wszystkie języki</string>
<string name="toggle_informations">Przełącz informacje</string>
<string name="who_can_access">Kto ma dostęp</string>
<string name="comments">Komentarze</string>
<string name="no_comments">Brak komentarzy</string>
<string name="error_loading_comments">Błąd ładowania komentarzy</string>
<string name="send">Wyślij</string>
<string name="add_comment">Dodaj komentarz</string>
<string name="comment_add_failure">Błąd dodawania komentarza</string>
<string name="adding_comment">Dodawanie komentarza</string>
<string name="comment_delete">Usuń komentarz</string>
<string name="deleting_comment">Usuwanie komentarza</string>
<string name="error_deleting_comment">Błąd usuwania komentarza</string>
<string name="export_pdf">PDF</string>
<string name="download">Pobierz</string>
<string name="margin">Margines</string>
<string name="fit_image_to_page">Dostosuj obraz do strony</string>
<string name="export_comments">Eksport komentarzy</string>
<string name="export_metadata">Eksport metadanych</string>
<string name="mm">mm</string>
<string name="download_file_title">Eksport plików Teedy</string>
<string name="download_document_title">Eksport dokumentu Teedy</string>
<string name="download_pdf_title">Eksport Teedy jako PDF</string>
<string name="latest_activity">Ostatnie aktywności</string>
<string name="activity">Aktywności</string>
<string name="email">E-mail</string>
<string name="storage_quota">Limit magazynu</string>
<string name="storage_display">%1$d/%2$d MB</string>
<string name="validation_code">Kod weryfikujący</string>
<string name="shared">Udostępnienie</string>
<string name="language">Język</string>
<string name="coverage">Zakres</string>
<string name="type">Rodzaj</string>
<string name="source">Źródło</string>
<string name="format">Format</string>
<string name="publisher">Udostępniający</string>
<string name="identifier">Identifikator</string>
<string name="subject">temat</string>
<string name="rights">Prawa</string>
<string name="contributors">Współtwórcy</string>
<string name="relations">Powiązania</string>
<!-- Audit log -->
<string name="auditlog_Acl">ACL</string>
<string name="auditlog_Comment">Komentarz</string>
<string name="auditlog_Document">Dokument</string>
<string name="auditlog_File">Plik</string>
<string name="auditlog_Group">Grupa</string>
<string name="auditlog_Route">Przepływ</string>
<string name="auditlog_RouteModel">Model przepływu</string>
<string name="auditlog_Tag">Etykieta</string>
<string name="auditlog_User">Użytkownik</string>
<string name="auditlog_Webhook">Webhook</string>
<string name="auditlog_created">utworzony</string>
<string name="auditlog_updated">zaktualizowany</string>
<string name="auditlog_deleted">usunięty</string>
</resources>

values/strings.xml

@ -72,7 +72,6 @@
<string name="language_french" translatable="false">Français</string>
<string name="language_english" translatable="false">English</string>
<string name="language_german" translatable="false">Deutsch</string>
<string name="language_polish" translatable="false">Polski</string>
<string name="save">Save</string>
<string name="edit_document">Edit</string>
<string name="error_editing_document">Network error, please try again</string>

docs-core/pom.xml

@ -5,10 +5,10 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
<version>1.7</version>
<relativePath>..</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>docs-core</artifactId>
<packaging>jar</packaging>
@ -17,10 +17,20 @@
<dependencies>
<!-- Persistence layer dependencies -->
<dependency>
<groupId>org.hibernate.orm</groupId>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>joda-time</groupId>
@ -31,30 +41,30 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
</dependency>
<dependency>
<groupId>jakarta.json</groupId>
<artifactId>jakarta.json-api</artifactId>
<groupId>org.glassfish</groupId>
<artifactId>javax.json</artifactId>
</dependency>
<dependency>
@ -66,37 +76,37 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>at.favre.lib</groupId>
<artifactId>bcrypt</artifactId>
<groupId>org.mindrot</groupId>
<artifactId>jbcrypt</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
@ -112,42 +122,37 @@
<artifactId>lucene-highlighter</artifactId>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-client-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-standalone</artifactId>
</dependency>
<!-- Only there to read old index and rebuild them -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</dependency>
<dependency>
<groupId>org.imgscalr</groupId>
<artifactId>imgscalr-lib</artifactId>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</dependency>
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>fr.opensagres.odfdom.converter.pdf</artifactId>
@ -184,21 +189,21 @@
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<!-- Development profile (active by default) -->
<profile>
@ -210,7 +215,7 @@
<value>dev</value>
</property>
</activation>
<build>
<resources>
<resource>
@ -226,7 +231,7 @@
<id>prod</id>
</profile>
</profiles>
<build>
<resources>
<resource>

ConfigType.java

@ -1,9 +1,9 @@
package com.sismics.docs.core.constant;
/**
* Configuration parameters.
* Configuration parameters.
*
* @author jtremeaux
* @author jtremeaux
*/
public enum ConfigType {
/**
@ -20,11 +20,6 @@ public enum ConfigType {
*/
GUEST_LOGIN,
/**
* OCR enabled.
*/
OCR_ENABLED,
/**
* Default language.
*/
@ -45,25 +40,7 @@ public enum ConfigType {
INBOX_ENABLED,
INBOX_HOSTNAME,
INBOX_PORT,
INBOX_STARTTLS,
INBOX_USERNAME,
INBOX_PASSWORD,
INBOX_FOLDER,
INBOX_TAG,
INBOX_AUTOMATIC_TAGS,
INBOX_DELETE_IMPORTED,
/**
* LDAP connection.
*/
LDAP_ENABLED,
LDAP_HOST,
LDAP_PORT,
LDAP_USESSL,
LDAP_ADMIN_DN,
LDAP_ADMIN_PASSWORD,
LDAP_BASE_DN,
LDAP_FILTER,
LDAP_DEFAULT_EMAIL,
LDAP_DEFAULT_STORAGE
INBOX_TAG
}

Constants.java

@ -18,18 +18,13 @@ public class Constants {
/**
* Administrator's default password ("admin").
*/
public static final String DEFAULT_ADMIN_PASSWORD = "$2y$10$xg0EEKVUehutDI1m6qQhVeFz7SMQMl1jQzjf2KkVsR2c7aV2vyyjK";
public static final String DEFAULT_ADMIN_PASSWORD = "$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C";
/**
* Administrator's default email.
*/
public static final String DEFAULT_ADMIN_EMAIL = "admin@localhost";
/**
* Bcrypt default work factor
*/
public static final int DEFAULT_BCRYPT_WORK = 10;
/**
* Guest user ID.
*/
@ -43,7 +38,7 @@ public class Constants {
/**
* Supported document languages.
*/
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces", "sqi");
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb");
/**
* Base URL environment variable.
@ -78,11 +73,6 @@ public class Constants {
*/
public static final String ADMIN_EMAIL_INIT_ENV = "DOCS_ADMIN_EMAIL_INIT";
/**
* Work factor to be used by Bcrypt
*/
public static final String BCRYPT_WORK_ENV = "DOCS_BCRYPT_WORK";
/**
* Expiration time of the password recovery in hours.
*/

AclDao.java

@ -10,8 +10,8 @@ import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.SecurityUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -128,9 +128,6 @@ public class AclDao {
if (SecurityUtil.skipAclCheck(targetIdList)) {
return true;
}
if (targetIdList.isEmpty()) {
return false;
}
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select a.ACL_ID_C from T_ACL a ");


@ -12,7 +12,7 @@ import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import javax.persistence.EntityManager;
import java.sql.Timestamp;
import java.util.*;

AuthenticationTokenDao.java

@ -4,8 +4,8 @@ import com.sismics.docs.core.model.jpa.AuthenticationToken;
import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.Date;
import java.util.List;
import java.util.UUID;

CommentDao.java

@ -6,9 +6,9 @@ import com.sismics.docs.core.model.jpa.Comment;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;

ConfigDao.java

@ -4,8 +4,8 @@ import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
/**
* Configuration parameter DAO.

ContributorDao.java

@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.ContributorDto;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
@ -56,7 +56,7 @@ public class ContributorDao {
@SuppressWarnings("unchecked")
public List<ContributorDto> getByDocumentId(String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select distinct u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
StringBuilder sb = new StringBuilder("select u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
sb.append(" join T_USER u on u.USE_ID_C = c.CTR_IDUSER_C ");
sb.append(" where c.CTR_IDDOC_C = :documentId ");
Query q = em.createNativeQuery(sb.toString());

DocumentDao.java

@ -7,10 +7,9 @@ import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
@ -51,9 +50,10 @@ public class DocumentDao {
* @param limit Limit
* @return List of documents
*/
@SuppressWarnings("unchecked")
public List<Document> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
Query q = em.createQuery("select d from Document d where d.deleteDate is null");
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList();
@ -65,9 +65,10 @@ public class DocumentDao {
* @param userId User ID
* @return List of documents
*/
@SuppressWarnings("unchecked")
public List<Document> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null");
q.setParameter("userId", userId);
return q.getResultList();
}
@ -87,7 +88,7 @@ public class DocumentDao {
}
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, d.DOC_IDFILE_C,");
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, ");
sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) shareCount, ");
sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) fileCount, ");
sb.append(" u.USE_USERNAME_C ");
@ -121,7 +122,6 @@ public class DocumentDao {
documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setUpdateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setLanguage((String) o[i++]);
documentDto.setFileId((String) o[i++]);
documentDto.setShared(((Number) o[i++]).intValue() > 0);
documentDto.setFileCount(((Number) o[i++]).intValue());
documentDto.setCreator((String) o[i]);
@ -138,16 +138,16 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
dq.setParameter("id", id);
Document documentDb = dq.getSingleResult();
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
q.setParameter("id", id);
Document documentDb = (Document) q.getSingleResult();
// Delete the document
Date dateNow = new Date();
documentDb.setDeleteDate(dateNow);
// Delete linked data
Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
q.setParameter("documentId", id);
q.setParameter("dateNow", dateNow);
q.executeUpdate();
@ -179,10 +179,10 @@ public class DocumentDao {
*/
public Document getById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
q.setParameter("id", id);
try {
return q.getSingleResult();
return (Document) q.getSingleResult();
} catch (NoResultException e) {
return null;
}
@ -199,9 +199,9 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
q.setParameter("id", document.getId());
Document documentDb = q.getSingleResult();
Document documentDb = (Document) q.getSingleResult();
// Update the document
documentDb.setTitle(document.getTitle());
@ -232,11 +232,12 @@ public class DocumentDao {
*/
public void updateFileId(Document document) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query query = em.createNativeQuery("update T_DOCUMENT d set DOC_IDFILE_C = :fileId, DOC_UPDATEDATE_D = :updateDate where d.DOC_ID_C = :id");
Query query = em.createNativeQuery("update T_DOCUMENT d set d.DOC_IDFILE_C = :fileId, d.DOC_UPDATEDATE_D = :updateDate where d.DOC_ID_C = :id");
query.setParameter("updateDate", new Date());
query.setParameter("fileId", document.getFileId());
query.setParameter("id", document.getId());
query.executeUpdate();
}
/**

DocumentMetadataDao.java

@ -5,8 +5,8 @@ import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

FileDao.java

@ -4,16 +4,12 @@ import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery;
import java.util.Collections;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
@ -51,9 +47,10 @@ public class FileDao {
* @param limit Limit
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.deleteDate is null");
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList();
@ -65,38 +62,28 @@ public class FileDao {
* @param userId User ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null");
q.setParameter("userId", userId);
return q.getResultList();
}
/**
* Returns a list of active files.
*
* @param ids Files IDs
* @return List of files
*/
public List<File> getFiles(List<String> ids) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
q.setParameter("ids", ids);
return q.getResultList();
}
/**
* Returns an active file or null.
* Returns an active file.
*
* @param id File ID
* @return File
* @return Document
*/
public File getFile(String id) {
List<File> files = getFiles(List.of(id));
if (files.isEmpty()) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
q.setParameter("id", id);
try {
return (File) q.getSingleResult();
} catch (NoResultException e) {
return null;
} else {
return files.get(0);
}
}
@ -105,15 +92,15 @@ public class FileDao {
*
* @param id File ID
* @param userId User ID
* @return File
* @return Document
*/
public File getFile(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null");
q.setParameter("id", id);
q.setParameter("userId", userId);
try {
return q.getSingleResult();
return (File) q.getSingleResult();
} catch (NoResultException e) {
return null;
}
@ -129,9 +116,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
q.setParameter("id", id);
File fileDb = q.getSingleResult();
File fileDb = (File) q.getSingleResult();
// Delete the file
Date dateNow = new Date();
@ -151,9 +138,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
q.setParameter("id", file.getId());
File fileDb = q.getSingleResult();
File fileDb = (File) q.getSingleResult();
// Update the file
fileDb.setDocumentId(file.getDocumentId());
@ -163,11 +150,10 @@ public class FileDao {
fileDb.setMimeType(file.getMimeType());
fileDb.setVersionId(file.getVersionId());
fileDb.setLatestVersion(file.isLatestVersion());
fileDb.setSize(file.getSize());
return file;
}
/**
* Gets a file by its ID.
*
@ -176,82 +162,46 @@ public class FileDao {
*/
public File getActiveById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
q.setParameter("id", id);
try {
return q.getSingleResult();
return (File) q.getSingleResult();
} catch (NoResultException e) {
return null;
}
}
/**
* Get files by document ID or all orphan files of a user.
* Get files by document ID or all orphan files of an user.
*
* @param userId User ID
* @param documentId Document ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> getByDocumentId(String userId, String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
if (documentId == null) {
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
q.setParameter("userId", userId);
return q.getResultList();
} else {
return getByDocumentsIds(Collections.singleton(documentId));
}
}
/**
* Get files by documents IDs.
*
* @param documentIds Documents IDs
* @return List of files
*/
public List<File> getByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("documentIds", documentIds);
Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
q.setParameter("documentId", documentId);
return q.getResultList();
}
/**
* Get files count by documents IDs.
*
* @param documentIds Documents IDs
* @return the number of files per document id
*/
public Map<String, Long> countByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f.documentId, count(*) from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null group by (f.documentId)");
q.setParameter("documentIds", documentIds);
Map<String, Long> result = new HashMap<>();
q.getResultList().forEach(o -> {
Object[] resultLine = (Object[]) o;
result.put((String) resultLine[0], (Long) resultLine[1]);
});
return result;
}
/**
* Get all files from a version.
*
* @param versionId Version ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> getByVersionId(String versionId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
q.setParameter("versionId", versionId);
return q.getResultList();
}
public List<File> getFilesWithUnknownSize(int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.size = :size and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("size", File.UNKNOWN_SIZE);
q.setMaxResults(limit);
return q.getResultList();
}
}

GroupDao.java

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.*;
/**
@ -183,10 +183,12 @@ public class GroupDao {
}
criteriaList.add("g.GRP_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@SuppressWarnings("unchecked")

MetadataDao.java

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.*;
/**
@ -123,8 +123,10 @@ public class MetadataDao {
criteriaList.add("m.MET_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);


@ -6,9 +6,9 @@ import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import org.joda.time.DurationFieldType;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.Date;
import java.util.UUID;

RelationDao.java

@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.RelationDto;
import com.sismics.docs.core.model.jpa.Relation;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.*;
/**


@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
import com.google.common.collect.Sets;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.Set;
/**

RouteDao.java

@ -11,7 +11,7 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import javax.persistence.EntityManager;
import java.sql.Timestamp;
import java.util.*;
@ -64,8 +64,10 @@ public class RouteDao {
}
criteriaList.add("r.RTE_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

RouteModelDao.java

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.*;
@ -145,8 +145,10 @@ public class RouteModelDao {
criteriaList.add("rm.RTM_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

RouteStepDao.java

@ -12,8 +12,8 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.*;
@ -90,8 +90,10 @@ public class RouteStepDao {
}
criteriaList.add("rs.RTP_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

ShareDao.java

@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Share;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.Date;
import java.util.UUID;
@ -19,6 +19,7 @@ public class ShareDao {
*
* @param share Share
* @return New ID
* @throws Exception
*/
public String create(Share share) {
// Create the UUID

TagDao.java

@ -13,9 +13,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.*;
/**
@ -199,8 +199,10 @@ public class TagDao {
criteriaList.add("t.TAG_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@ -1,14 +1,7 @@
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import at.favre.lib.crypto.bcrypt.BCrypt;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.User;
@ -18,10 +11,12 @@ import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import org.mindrot.jbcrypt.BCrypt;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.*;
@ -31,11 +26,6 @@ import java.util.*;
* @author jtremeaux
*/
public class UserDao {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(UserDao.class);
/**
* Authenticates an user.
*
@ -49,8 +39,7 @@ public class UserDao {
q.setParameter("username", username);
try {
User user = (User) q.getSingleResult();
BCrypt.Result result = BCrypt.verifyer().verify(password.toCharArray(), user.getPassword());
if (!result.verified || user.getDisableDate() != null) {
if (!BCrypt.checkpw(password, user.getPassword()) || user.getDisableDate() != null) {
return null;
}
return user;
@ -288,21 +277,7 @@ public class UserDao {
* @return Hashed password
*/
private String hashPassword(String password) {
int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
if (!Strings.isNullOrEmpty(envBcryptWork)) {
try {
int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {
bcryptWork = envBcryptWorkInt;
} else {
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
}
} catch (NumberFormatException e) {
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be a number in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
}
}
return BCrypt.withDefaults().hashToString(bcryptWork, password.toCharArray());
return BCrypt.hashpw(password, BCrypt.gensalt());
}
/**
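The UserDao hunk swaps bcrypt libraries: org.mindrot.jbcrypt (hashpw/checkpw) on one side, at.favre.lib.crypto.bcrypt with a work factor read from the environment on the other. A minimal sketch of the configurable-cost variant, with the environment variable name and default cost passed in as parameters instead of the Constants fields referenced above:

    import at.favre.lib.crypto.bcrypt.BCrypt;

    public class PasswordHashSketch {
        // Hash with a cost taken from an environment variable, falling back to the default
        // when the variable is missing, non-numeric or outside bcrypt's valid 4..31 range.
        static String hash(String password, String envVar, int defaultCost) {
            int cost = defaultCost;
            String env = System.getenv(envVar);
            if (env != null && !env.isEmpty()) {
                try {
                    int parsed = Integer.parseInt(env);
                    if (parsed >= 4 && parsed <= 31) {
                        cost = parsed;
                    }
                } catch (NumberFormatException e) {
                    // keep the default cost
                }
            }
            return BCrypt.withDefaults().hashToString(cost, password.toCharArray());
        }

        // Verification mirrors the authenticate() change above: check the verified flag.
        static boolean verify(String password, String storedHash) {
            return BCrypt.verifyer().verify(password.toCharArray(), storedHash).verified;
        }
    }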

View File

@ -3,9 +3,9 @@ package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.List;
import java.util.UUID;
@ -20,6 +20,7 @@ public class VocabularyDao {
*
* @param vocabulary Vocabulary
* @return New ID
* @throws Exception
*/
public String create(Vocabulary vocabulary) {
// Create the UUID

View File

@ -9,9 +9,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.*;
@ -42,9 +42,11 @@ public class WebhookDao {
}
criteriaList.add("w.WHK_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@SuppressWarnings("unchecked")

View File

@ -1,6 +1,5 @@
package com.sismics.docs.core.dao.criteria;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -19,7 +18,7 @@ public class DocumentCriteria {
/**
* Search query.
*/
private String simpleSearch;
private String search;
/**
* Full content search query.
@ -50,13 +49,13 @@ public class DocumentCriteria {
* Tag IDs.
* The first level list will be AND'ed and the second level list will be OR'ed.
*/
private List<List<String>> tagIdList = new ArrayList<>();
private List<List<String>> tagIdList;
/**
* Tag IDs to exclude.
* Tag IDs to excluded.
* The first and second level list will be excluded.
*/
private List<List<String>> excludedTagIdList = new ArrayList<>();
private List<List<String>> excludedTagIdList;
/**
* Shared status.
@ -77,17 +76,7 @@ public class DocumentCriteria {
* A route is active.
*/
private Boolean activeRoute;
/**
* MIME type of a file.
*/
private String mimeType;
/**
* Titles to include.
*/
private List<String> titleList = new ArrayList<>();
public List<String> getTargetIdList() {
return targetIdList;
}
@ -96,12 +85,12 @@ public class DocumentCriteria {
this.targetIdList = targetIdList;
}
public String getSimpleSearch() {
return simpleSearch;
public String getSearch() {
return search;
}
public void setSimpleSearch(String search) {
this.simpleSearch = search;
public void setSearch(String search) {
this.search = search;
}
public String getFullSearch() {
@ -132,10 +121,19 @@ public class DocumentCriteria {
return tagIdList;
}
public void setTagIdList(List<List<String>> tagIdList) {
this.tagIdList = tagIdList;
}
public List<List<String>> getExcludedTagIdList() {
return excludedTagIdList;
}
public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
this.excludedTagIdList = excludedTagIdList;
return this;
}
public Boolean getShared() {
return shared;
}
@ -159,7 +157,11 @@ public class DocumentCriteria {
public void setCreatorId(String creatorId) {
this.creatorId = creatorId;
}
public Boolean getActiveRoute() {
return activeRoute;
}
public Date getUpdateDateMin() {
return updateDateMin;
}
@ -176,23 +178,7 @@ public class DocumentCriteria {
this.updateDateMax = updateDateMax;
}
public Boolean getActiveRoute() {
return activeRoute;
}
public void setActiveRoute(Boolean activeRoute) {
this.activeRoute = activeRoute;
}
public String getMimeType() {
return mimeType;
}
public void setMimeType(String mimeType) {
this.mimeType = mimeType;
}
public List<String> getTitleList() {
return titleList;
}
}

View File

@ -3,8 +3,8 @@ package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.RouteStepType;
import com.sismics.util.JsonUtil;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import javax.json.Json;
import javax.json.JsonObjectBuilder;
/**
* Route step DTO.

View File

@ -1,6 +1,7 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.Document;
/**
* Document created event.
@ -9,22 +10,32 @@ import com.google.common.base.MoreObjects;
*/
public class DocumentCreatedAsyncEvent extends UserEvent {
/**
* Document ID.
* Created document.
*/
private String documentId;
public String getDocumentId() {
return documentId;
private Document document;
/**
* Getter of document.
*
* @return the document
*/
public Document getDocument() {
return document;
}
public void setDocumentId(String documentId) {
this.documentId = documentId;
/**
* Setter of document.
*
* @param document document
*/
public void setDocument(Document document) {
this.document = document;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("documentId", documentId)
.toString();
.add("document", document)
.toString();
}
}

View File

@ -1,6 +1,7 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
/**
* File deleted event.
@ -9,33 +10,22 @@ import com.google.common.base.MoreObjects;
*/
public class FileDeletedAsyncEvent extends UserEvent {
/**
* File ID.
* Deleted file.
*/
private String fileId;
private Long fileSize;
public String getFileId() {
return fileId;
private File file;
public File getFile() {
return file;
}
public void setFileId(String fileId) {
this.fileId = fileId;
public void setFile(File file) {
this.file = file;
}
public Long getFileSize() {
return fileSize;
}
public void setFileSize(Long fileSize) {
this.fileSize = fileSize;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("fileId", fileId)
.add("fileSize", fileSize)
.add("file", file)
.toString();
}
}
}

View File

@ -1,6 +1,7 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
import java.nio.file.Path;
@ -11,9 +12,9 @@ import java.nio.file.Path;
*/
public abstract class FileEvent extends UserEvent {
/**
* File ID.
* Created file.
*/
private String fileId;
private File file;
/**
* Language of the file.
@ -24,15 +25,15 @@ public abstract class FileEvent extends UserEvent {
* Unencrypted original file.
*/
private Path unencryptedFile;
public String getFileId() {
return fileId;
public File getFile() {
return file;
}
public void setFileId(String fileId) {
this.fileId = fileId;
public void setFile(File file) {
this.file = file;
}
public String getLanguage() {
return language;
}
@ -53,7 +54,7 @@ public abstract class FileEvent extends UserEvent {
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("fileId", fileId)
.add("file", file)
.add("language", language)
.toString();
}

View File

@ -3,11 +3,9 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -36,22 +34,15 @@ public class DocumentCreatedAsyncListener {
}
TransactionUtil.handle(() -> {
// Fetch a fresh document
Document document = new DocumentDao().getById(event.getDocumentId());
if (document == null) {
// The document has been deleted since
return;
}
// Add the first contributor (the creator of the document)
ContributorDao contributorDao = new ContributorDao();
Contributor contributor = new Contributor();
contributor.setDocumentId(event.getDocumentId());
contributor.setDocumentId(event.getDocument().getId());
contributor.setUserId(event.getUserId());
contributorDao.create(contributor);
// Update index
AppContext.getInstance().getIndexingHandler().createDocument(document);
AppContext.getInstance().getIndexingHandler().createDocument(event.getDocument());
});
}
}

View File

@ -2,11 +2,9 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
@ -14,7 +12,7 @@ import org.slf4j.LoggerFactory;
/**
* Listener on file deleted.
*
*
* @author bgamard
*/
public class FileDeletedAsyncListener {
@ -25,7 +23,7 @@ public class FileDeletedAsyncListener {
/**
* File deleted.
*
*
* @param event File deleted event
* @throws Exception e
*/
@ -35,31 +33,14 @@ public class FileDeletedAsyncListener {
if (log.isInfoEnabled()) {
log.info("File deleted event: " + event.toString());
}
TransactionUtil.handle(() -> {
// Update the user quota
UserDao userDao = new UserDao();
User user = userDao.getById(event.getUserId());
if (user != null) {
Long fileSize = event.getFileSize();
if (fileSize.equals(File.UNKNOWN_SIZE)) {
// The file size was not in the database, in this case we need to get from the unencrypted size.
fileSize = FileUtil.getFileSize(event.getFileId(), user);
}
if (! fileSize.equals(File.UNKNOWN_SIZE)) {
user.setStorageCurrent(user.getStorageCurrent() - fileSize);
userDao.updateQuota(user);
}
}
});
// Delete the file from storage
FileUtil.delete(event.getFileId());
File file = event.getFile();
FileUtil.delete(file);
TransactionUtil.handle(() -> {
// Update index
AppContext.getInstance().getIndexingHandler().deleteDocument(event.getFileId());
AppContext.getInstance().getIndexingHandler().deleteDocument(file.getId());
});
}
}

View File

@ -28,7 +28,6 @@ import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.MessageFormat;
import java.util.concurrent.atomic.AtomicReference;
/**
* Listener on file processing.
@ -53,7 +52,15 @@ public class FileProcessingAsyncListener {
log.info("File created event: " + event.toString());
}
processFile(event, true);
TransactionUtil.handle(() -> {
// Generate thumbnail, extract content
processFile(event);
// Update index
AppContext.getInstance().getIndexingHandler().createFile(event.getFile());
});
FileUtil.endProcessingFile(event.getFile().getId());
}
/**
@ -64,84 +71,43 @@ public class FileProcessingAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) {
log.info("File updated event: " + event.toString());
if (log.isInfoEnabled()) {
log.info("File updated event: " + event.toString());
}
processFile(event, false);
}
/**
* Process a file :
* Generate thumbnails
* Extract and save text content
*
* @param event File event
* @param isFileCreated True if the file was just created
*/
private void processFile(FileEvent event, boolean isFileCreated) {
AtomicReference<File> file = new AtomicReference<>();
AtomicReference<User> user = new AtomicReference<>();
// Open a first transaction to get what we need to start the processing
TransactionUtil.handle(() -> {
// Generate thumbnail, extract content
file.set(new FileDao().getActiveById(event.getFileId()));
if (file.get() == null) {
// The file has been deleted since
return;
}
processFile(event);
// Get the creating user from the database for its private key
UserDao userDao = new UserDao();
user.set(userDao.getById(file.get().getUserId()));
// Update index
AppContext.getInstance().getIndexingHandler().updateFile(event.getFile());
});
// Process the file outside of a transaction
if (user.get() == null || file.get() == null) {
// The user or file has been deleted
FileUtil.endProcessingFile(event.getFileId());
return;
}
String content = extractContent(event, user.get(), file.get());
// Open a new transaction to save the file content
TransactionUtil.handle(() -> {
// Save the file to database
FileDao fileDao = new FileDao();
File freshFile = fileDao.getActiveById(event.getFileId());
if (freshFile == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
freshFile.setContent(content);
fileDao.update(freshFile);
// Update index with the updated file
if (isFileCreated) {
AppContext.getInstance().getIndexingHandler().createFile(freshFile);
} else {
AppContext.getInstance().getIndexingHandler().updateFile(freshFile);
}
});
FileUtil.endProcessingFile(event.getFileId());
FileUtil.endProcessingFile(event.getFile().getId());
}
/**
* Extract text content from a file.
* This is executed outside of a transaction.
* Process the file (create/update).
*
* @param event File event
* @param user User whom created the file
* @param file Fresh file
* @return Text content
*/
private String extractContent(FileEvent event, User user, File file) {
private void processFile(FileEvent event) {
// Find a format handler
final File file = event.getFile();
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
if (formatHandler == null) {
log.info("Format unhandled: " + file.getMimeType());
return null;
FileUtil.endProcessingFile(file.getId());
return;
}
// Get the user from the database
UserDao userDao = new UserDao();
User user = userDao.getById(event.getUserId());
if (user == null) {
// The user has been deleted meanwhile
FileUtil.endProcessingFile(file.getId());
return;
}
// Generate file variations
@ -166,21 +132,28 @@ public class FileProcessingAsyncListener {
ImageUtil.writeJpeg(thumbnail, outputStream);
}
}
} catch (Throwable e) {
log.error("Unable to generate thumbnails for: " + file, e);
} catch (Exception e) {
log.error("Unable to generate thumbnails", e);
}
// Extract text content from the file
long startTime = System.currentTimeMillis();
String content = null;
log.info("Start extracting content from: " + file);
try {
content = formatHandler.extractContent(event.getLanguage(), event.getUnencryptedFile());
} catch (Throwable e) {
log.error("Error extracting content from: " + file, e);
} catch (Exception e) {
log.error("Error extracting content from: " + event.getFile(), e);
}
log.info(MessageFormat.format("File content extracted in {0}ms: " + file.getId(), System.currentTimeMillis() - startTime));
log.info(MessageFormat.format("File content extracted in {0}ms", System.currentTimeMillis() - startTime));
return content;
// Save the file to database
FileDao fileDao = new FileDao();
if (fileDao.getActiveById(file.getId()) == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
file.setContent(content);
fileDao.update(file);
}
}
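The FileProcessingAsyncListener hunk is the largest behavioural change in this section: one variant runs thumbnailing, content extraction and the database update inside a single transaction, while the other splits the work into a short read transaction, the slow extraction outside any transaction, and a fresh write transaction that re-checks the file still exists. A compressed sketch of the split shape, reusing the Teedy classes named above and substituting a dummy extractContent for the format-handler call:

    import com.sismics.docs.core.dao.FileDao;
    import com.sismics.docs.core.model.jpa.File;
    import com.sismics.docs.core.util.TransactionUtil;
    import java.util.concurrent.atomic.AtomicReference;

    public class SplitProcessingSketch {
        static void process(String fileId) {
            AtomicReference<File> file = new AtomicReference<>();
            // 1. Short transaction: load only what the long-running step needs.
            TransactionUtil.handle(() -> file.set(new FileDao().getActiveById(fileId)));
            if (file.get() == null) {
                return; // deleted in the meantime
            }
            // 2. No transaction is held while the slow OCR / text extraction runs.
            String content = extractContent(file.get());
            // 3. New transaction: re-read and persist, tolerating concurrent deletion.
            TransactionUtil.handle(() -> {
                FileDao fileDao = new FileDao();
                File fresh = fileDao.getActiveById(fileId);
                if (fresh == null) {
                    return;
                }
                fresh.setContent(content);
                fileDao.update(fresh);
            });
        }

        // Stand-in for formatHandler.extractContent(language, unencryptedFile) above.
        private static String extractContent(File file) {
            return "";
        }
    }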

View File

@ -36,7 +36,7 @@ public class WebhookAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final DocumentCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocumentId());
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocument().getId());
}
@Subscribe
@ -54,19 +54,19 @@ public class WebhookAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final FileCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFileId());
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFile().getId());
}
@Subscribe
@AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFileId());
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFile().getId());
}
@Subscribe
@AllowConcurrentEvents
public void on(final FileDeletedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFileId());
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFile().getId());
}
/**
@ -86,7 +86,7 @@ public class WebhookAsyncListener {
}
});
RequestBody body = RequestBody.create("{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}", JSON);
RequestBody body = RequestBody.create(JSON, "{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}");
for (String webhookUrl : webhookUrlList) {
Request request = new Request.Builder()
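Aside from the ID-versus-entity change on the events, the WebhookAsyncListener hunk only flips the argument order of RequestBody.create, which follows the signature change between OkHttp 3 and OkHttp 4. A minimal sketch of the webhook POST itself, using the content-first order shown first in the hunk; the media type literal and the throwaway client are assumptions:

    import okhttp3.MediaType;
    import okhttp3.OkHttpClient;
    import okhttp3.Request;
    import okhttp3.RequestBody;
    import okhttp3.Response;

    public class WebhookSketch {
        private static final MediaType JSON = MediaType.parse("application/json");

        // Posts the same {"event": ..., "id": ...} payload the listener builds above.
        static void trigger(String webhookUrl, String eventName, String id) throws Exception {
            String payload = "{\"event\": \"" + eventName + "\", \"id\": \"" + id + "\"}";
            RequestBody body = RequestBody.create(payload, JSON); // OkHttp 3.x expects (JSON, payload)
            Request request = new Request.Builder()
                    .url(webhookUrl)
                    .post(body)
                    .build();
            try (Response response = new OkHttpClient().newCall(request).execute()) {
                // The listener does not inspect the response; issuing the call is enough.
            }
        }
    }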

View File

@ -1,15 +1,14 @@
package com.sismics.docs.core.model.context;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.FileService;
import com.sismics.docs.core.service.FileSizeService;
import com.sismics.docs.core.service.InboxService;
import com.sismics.docs.core.util.PdfUtil;
import com.sismics.docs.core.util.indexing.IndexingHandler;
@ -66,11 +65,6 @@ public class AppContext {
*/
private FileService fileService;
/**
* File size service.
*/
private FileSizeService fileSizeService;
/**
* Asynchronous executors.
*/
@ -87,7 +81,7 @@ public class AppContext {
List<Class<? extends IndexingHandler>> indexingHandlerList = Lists.newArrayList(
new ClasspathScanner<IndexingHandler>().findClasses(IndexingHandler.class, "com.sismics.docs.core.util.indexing"));
for (Class<? extends IndexingHandler> handlerClass : indexingHandlerList) {
IndexingHandler handler = handlerClass.getDeclaredConstructor().newInstance();
IndexingHandler handler = handlerClass.newInstance();
if (handler.accept()) {
indexingHandler = handler;
break;
@ -108,17 +102,12 @@ public class AppContext {
inboxService.startAsync();
inboxService.awaitRunning();
// Start file size service
fileSizeService = new FileSizeService();
fileSizeService.startAsync();
fileSizeService.awaitRunning();
// Register fonts
PdfUtil.registerFonts();
// Change the admin password if needed
String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
if (!Strings.isNullOrEmpty(envAdminPassword)) {
if (envAdminPassword != null) {
UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
@ -129,7 +118,7 @@ public class AppContext {
// Change the admin email if needed
String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
if (!Strings.isNullOrEmpty(envAdminEmail)) {
if (envAdminEmail != null) {
UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {
@ -183,8 +172,7 @@ public class AppContext {
if (EnvironmentUtil.isUnitTest()) {
return new EventBus();
} else {
int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
ThreadPoolExecutor executor = new ThreadPoolExecutor(8, 8,
1L, TimeUnit.MINUTES,
new LinkedBlockingQueue<>());
asyncExecutorList.add(executor);
@ -249,10 +237,6 @@ public class AppContext {
fileService.stopAsync();
}
if (fileSizeService != null) {
fileSizeService.stopAsync();
}
instance = null;
}
}
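The AppContext hunk resizes the async event bus executor: a fixed pool of 8 threads on one side, half the available processors with a floor of 2 on the other. A minimal sketch of the CPU-derived variant, assuming the Guava AsyncEventBus imported above; returning the bus directly simplifies away the unit-test EventBus branch shown in the hunk:

    import com.google.common.eventbus.AsyncEventBus;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class EventBusSketch {
        static AsyncEventBus newAsyncEventBus() {
            // Half the cores, but never fewer than two worker threads.
            int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
            ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
                    1L, TimeUnit.MINUTES,
                    new LinkedBlockingQueue<>());
            return new AsyncEventBus(executor);
        }
    }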

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import jakarta.persistence.*;
import javax.persistence.*;
import java.util.Date;
/**

View File

@ -2,12 +2,12 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.AuditLogType;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,11 +1,11 @@
package com.sismics.docs.core.model.jpa;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.ConfigType;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
/**

View File

@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
import java.io.Serializable;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.sismics.util.mime.MimeTypeUtil;
import jakarta.persistence.*;
import javax.persistence.*;
import java.util.Date;
/**
@ -49,6 +49,7 @@ public class File implements Loggable {
/**
* OCR-ized content.
*/
@Lob
@Column(name = "FIL_CONTENT_C")
private String content;
@ -88,14 +89,6 @@ public class File implements Loggable {
@Column(name = "FIL_LATESTVERSION_B", nullable = false)
private boolean latestVersion;
public static final Long UNKNOWN_SIZE = -1L;
/**
* Can be {@link File#UNKNOWN_SIZE} if the size has not been stored in the database when the file has been uploaded
*/
@Column(name = "FIL_SIZE_N", nullable = false)
private Long size;
/**
* Private key to decrypt the file.
* Not saved to database, of course.
@ -212,18 +205,6 @@ public class File implements Loggable {
return this;
}
/**
* Can return {@link File#UNKNOWN_SIZE} if the file size is not stored in the database.
*/
public Long getSize() {
return size;
}
public File setSize(Long size) {
this.size = size;
return this;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.MetadataType;
import jakarta.persistence.*;
import javax.persistence.*;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
/**

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.RouteStepTransition;
import com.sismics.docs.core.constant.RouteStepType;
import jakarta.persistence.*;
import javax.persistence.*;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
/**

View File

@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
import java.io.Serializable;
import java.util.Date;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.WebhookEvent;
import jakarta.persistence.*;
import javax.persistence.*;
import java.util.Date;
/**

View File

@ -69,18 +69,13 @@ public class FileService extends AbstractScheduledService {
return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
}
public Path createTemporaryFile() throws IOException {
return createTemporaryFile(null);
}
/**
* Create a temporary file.
*
* @param name Wanted file name
* @return New temporary file
*/
public Path createTemporaryFile(String name) throws IOException {
Path path = Files.createTempFile("sismics_docs", name);
public Path createTemporaryFile() throws IOException {
Path path = Files.createTempFile("sismics_docs", null);
referenceSet.add(new TemporaryPathReference(path, referenceQueue));
return path;
}
@ -90,7 +85,7 @@ public class FileService extends AbstractScheduledService {
*
* @author bgamard
*/
static class TemporaryPathReference extends PhantomReference<Path> {
class TemporaryPathReference extends PhantomReference<Path> {
String path;
TemporaryPathReference(Path referent, ReferenceQueue<? super Path> q) {
super(referent, q);

View File

@ -1,78 +0,0 @@
package com.sismics.docs.core.service;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Service that retrieve files sizes when they are not in the database.
*/
public class FileSizeService extends AbstractScheduledService {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileSizeService.class);
public FileSizeService() {
}
@Override
protected void startUp() {
log.info("File size service starting up");
}
@Override
protected void shutDown() {
log.info("File size service shutting down");
}
private static final int BATCH_SIZE = 30;
@Override
protected void runOneIteration() {
try {
TransactionUtil.handle(() -> {
FileDao fileDao = new FileDao();
List<File> files = fileDao.getFilesWithUnknownSize(BATCH_SIZE);
for(File file : files) {
processFile(file);
}
if(files.size() < BATCH_SIZE) {
log.info("No more file to process, stopping the service");
stopAsync();
}
});
} catch (Throwable e) {
log.error("Exception during file service iteration", e);
}
}
void processFile(File file) {
UserDao userDao = new UserDao();
User user = userDao.getById(file.getUserId());
if(user == null) {
return;
}
long fileSize = FileUtil.getFileSize(file.getId(), user);
if(fileSize != File.UNKNOWN_SIZE){
FileDao fileDao = new FileDao();
file.setSize(fileSize);
fileDao.update(file);
}
}
@Override
protected Scheduler scheduler() {
return Scheduler.newFixedDelaySchedule(0, 1, TimeUnit.MINUTES);
}
}

View File

@ -1,10 +1,9 @@
package com.sismics.docs.core.service;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;
@ -12,19 +11,17 @@ import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DocumentUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.EmailUtil;
import com.sismics.util.context.ThreadLocalContext;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.*;
import javax.mail.search.FlagTerm;
import java.util.*;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Inbox scanning service.
@ -82,25 +79,22 @@ public class InboxService extends AbstractScheduledService {
lastSyncDate = new Date();
lastSyncMessageCount = 0;
try {
Map<String, String> tagsNameToId = getAllTags();
inbox = openInbox();
Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
log.info(messages.length + " messages found");
for (Message message : messages) {
importMessage(message, tagsNameToId);
importMessage(message);
lastSyncMessageCount++;
}
} catch (FolderClosedException e) {
// Ignore this, we will just continue importing on the next cycle
} catch (Exception e) {
log.error("Error syncing the inbox", e);
log.error("Error synching the inbox", e);
lastSyncError = e.getMessage();
} finally {
try {
if (inbox != null) {
// The parameter controls if the messages flagged to be deleted, should actually get deleted.
inbox.close(ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED));
inbox.close(false);
inbox.getStore().close();
}
} catch (Exception e) {
@ -156,7 +150,6 @@ public class InboxService extends AbstractScheduledService {
String port = ConfigUtil.getConfigStringValue(ConfigType.INBOX_PORT);
properties.put("mail.imap.host", ConfigUtil.getConfigStringValue(ConfigType.INBOX_HOSTNAME));
properties.put("mail.imap.port", port);
properties.setProperty("mail.imap.starttls.enable", ConfigUtil.getConfigStringValue(ConfigType.INBOX_STARTTLS).toString());
boolean isSsl = "993".equals(port);
properties.put("mail.imap.ssl.enable", String.valueOf(isSsl));
properties.setProperty("mail.imap.socketFactory.class",
@ -179,7 +172,7 @@ public class InboxService extends AbstractScheduledService {
store.connect(ConfigUtil.getConfigStringValue(ConfigType.INBOX_USERNAME),
ConfigUtil.getConfigStringValue(ConfigType.INBOX_PASSWORD));
Folder inbox = store.getFolder(ConfigUtil.getConfigStringValue(ConfigType.INBOX_FOLDER));
Folder inbox = store.getFolder("INBOX");
inbox.open(Folder.READ_WRITE);
return inbox;
}
@ -190,7 +183,7 @@ public class InboxService extends AbstractScheduledService {
* @param message Message
* @throws Exception e
*/
private void importMessage(Message message, Map<String, String> tags) throws Exception {
private void importMessage(Message message) throws Exception {
log.info("Importing message: " + message.getSubject());
// Parse the mail
@ -201,27 +194,12 @@ public class InboxService extends AbstractScheduledService {
// Create the document
Document document = new Document();
String subject = mailContent.getSubject();
if (subject == null) {
subject = "Imported email from EML file";
}
HashSet<String> tagsFound = new HashSet<>();
if (tags != null) {
Pattern pattern = Pattern.compile("#([^\\s:#]+)");
Matcher matcher = pattern.matcher(subject);
while (matcher.find()) {
if (tags.containsKey(matcher.group(1)) && tags.get(matcher.group(1)) != null) {
tagsFound.add(tags.get(matcher.group(1)));
subject = subject.replaceFirst("#" + matcher.group(1), "");
}
}
log.debug("Tags found: " + String.join(", ", tagsFound));
subject = subject.trim().replaceAll(" +", " ");
}
document.setUserId("admin");
document.setTitle(StringUtils.abbreviate(subject, 100));
if (mailContent.getSubject() == null) {
document.setTitle("Imported email from EML file");
} else {
document.setTitle(StringUtils.abbreviate(mailContent.getSubject(), 100));
}
document.setDescription(StringUtils.abbreviate(mailContent.getMessage(), 4000));
document.setSubject(StringUtils.abbreviate(mailContent.getSubject(), 500));
document.setFormat("EML");
@ -242,19 +220,14 @@ public class InboxService extends AbstractScheduledService {
TagDao tagDao = new TagDao();
Tag tag = tagDao.getById(tagId);
if (tag != null) {
tagsFound.add(tagId);
tagDao.updateTagList(document.getId(), Sets.newHashSet(tagId));
}
}
// Update tags
if (!tagsFound.isEmpty()) {
new TagDao().updateTagList(document.getId(), tagsFound);
}
// Raise a document created event
DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent();
documentCreatedAsyncEvent.setUserId("admin");
documentCreatedAsyncEvent.setDocumentId(document.getId());
documentCreatedAsyncEvent.setDocument(document);
ThreadLocalContext.get().addAsyncEvent(documentCreatedAsyncEvent);
// Add files to the document
@ -262,29 +235,6 @@ public class InboxService extends AbstractScheduledService {
FileUtil.createFile(fileContent.getName(), null, fileContent.getFile(), fileContent.getSize(),
document.getLanguage(), "admin", document.getId());
}
if (ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED)) {
message.setFlag(Flags.Flag.DELETED, true);
}
}
/**
* Fetches a HashMap with all tag names as keys and their respective ids as values.
*
* @return Map with all tags or null if not enabled
*/
private Map<String, String> getAllTags() {
if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
return null;
}
TagDao tagDao = new TagDao();
List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
Map<String, String> tagsNameToId = new HashMap<>();
for (TagDto tagDto : tags) {
tagsNameToId.put(tagDto.getName(), tagDto.getId());
}
return tagsNameToId;
}
public Date getLastSyncDate() {
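One side of the InboxService hunk adds automatic tagging: every #name token in the mail subject that matches a known tag is collected and stripped from the title before the document is created. A minimal sketch of that parsing step, assuming a name-to-id map like the one produced by getAllTags() above; Pattern.quote is an addition here so tag names containing regex metacharacters do not break the strip:

    import java.util.Map;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SubjectTagSketch {
        // Collects the IDs of recognised #tags into tagsFound and returns the cleaned title.
        static String extractTags(String subject, Map<String, String> tagsNameToId, Set<String> tagsFound) {
            Pattern pattern = Pattern.compile("#([^\\s:#]+)");
            Matcher matcher = pattern.matcher(subject);
            while (matcher.find()) {
                String name = matcher.group(1);
                String tagId = tagsNameToId.get(name);
                if (tagId != null) {
                    tagsFound.add(tagId);
                    // Remove the recognised token; unknown #tokens stay in the title.
                    subject = subject.replaceFirst("#" + Pattern.quote(name), "");
                }
            }
            // Collapse the whitespace left behind by removed tokens.
            return subject.trim().replaceAll(" +", " ");
        }
    }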

View File

@ -9,7 +9,7 @@ import com.sismics.docs.core.util.action.RemoveTagAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
/**
* Action utilities.

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.model.jpa.AuditLog;
import com.sismics.docs.core.model.jpa.Loggable;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import javax.persistence.EntityManager;
/**
* Audit log utilities.

View File

@ -8,12 +8,13 @@ import java.util.ResourceBundle;
/**
* Configuration parameter utilities.
*
*
* @author jtremeaux
*/
public class ConfigUtil {
/**
* Returns the textual value of a configuration parameter.
*
*
* @param configType Type of the configuration parameter
* @return Textual value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
@ -29,7 +30,7 @@ public class ConfigUtil {
/**
* Returns the configuration resource bundle.
*
*
* @return Resource bundle
*/
public static ResourceBundle getConfigBundle() {
@ -38,54 +39,27 @@ public class ConfigUtil {
/**
* Returns the integer value of a configuration parameter.
*
*
* @param configType Type of the configuration parameter
* @return Integer value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
*/
public static int getConfigIntegerValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Integer.parseInt(value);
}
/**
* Returns the long value of a configuration parameter.
*
* @param configType Type of the configuration parameter
* @return Long value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
*/
public static long getConfigLongValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Long.parseLong(value);
}
/**
* Returns the boolean value of a configuration parameter.
*
*
* @param configType Type of the configuration parameter
* @return Boolean value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
*/
public static boolean getConfigBooleanValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Boolean.parseBoolean(value);
}
/**
* Returns the boolean value of a configuration parameter with a default value.
*
* @param configType Type of the configuration parameter
* @param defaultValue Default value to return if the configuration parameter is undefined
* @return Boolean value of the configuration parameter
*/
public static boolean getConfigBooleanValue(ConfigType configType, boolean defaultValue) {
try {
return getConfigBooleanValue(configType);
} catch (IllegalStateException e) {
return defaultValue;
}
}
}
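One side of the ConfigUtil hunk adds a long accessor and a boolean accessor with a default value, so callers can read parameters that may not exist in the database yet. A hypothetical call site, reusing the INBOX_AUTOMATIC_TAGS parameter referenced in the InboxService hunk above:

    import com.sismics.docs.core.constant.ConfigType;
    import com.sismics.docs.core.util.ConfigUtil;

    public class ConfigUsageSketch {
        // Falls back to false when the configuration row has never been written.
        static boolean isAutomaticTaggingEnabled() {
            return ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS, false);
        }
    }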

View File

@ -5,7 +5,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import com.sismics.util.EnvironmentUtil;

View File

@ -1,5 +1,6 @@
package com.sismics.docs.core.util;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
@ -16,12 +17,7 @@ import com.sismics.util.Scalr;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.CountingInputStream;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang.StringUtils;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
@ -30,7 +26,6 @@ import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
@ -41,15 +36,10 @@ import java.util.*;
* @author bgamard
*/
public class FileUtil {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileUtil.class);
/**
* File ID of files currently being processed.
*/
private static final Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
private static Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
/**
* Optical character recognition on an image.
@ -79,19 +69,19 @@ public class FileUtil {
// Consume the data as text
try (InputStream is = process.getInputStream()) {
return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
}
}
/**
* Remove a file from the storage filesystem.
*
* @param fileId ID of file to delete
* @param file File to delete
*/
public static void delete(String fileId) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
Path webFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_thumb");
public static void delete(File file) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(file.getId());
Path webFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_thumb");
if (Files.exists(storedFile)) {
Files.delete(storedFile);
@ -136,7 +126,7 @@ public class FileUtil {
// Validate global quota
String globalStorageQuotaStr = System.getenv(Constants.GLOBAL_QUOTA_ENV);
if (!Strings.isNullOrEmpty(globalStorageQuotaStr)) {
long globalStorageQuota = Long.parseLong(globalStorageQuotaStr);
long globalStorageQuota = Long.valueOf(globalStorageQuotaStr);
long globalStorageCurrent = userDao.getGlobalStorageCurrent();
if (globalStorageCurrent + fileSize > globalStorageQuota) {
throw new IOException("QuotaReached");
@ -152,7 +142,6 @@ public class FileUtil {
file.setName(StringUtils.abbreviate(name, 200));
file.setMimeType(mimeType);
file.setUserId(userId);
file.setSize(fileSize);
// Get files of this document
FileDao fileDao = new FileDao();
@ -201,7 +190,7 @@ public class FileUtil {
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(userId);
fileCreatedAsyncEvent.setLanguage(language);
fileCreatedAsyncEvent.setFileId(file.getId());
fileCreatedAsyncEvent.setFile(file);
fileCreatedAsyncEvent.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(fileCreatedAsyncEvent);
@ -222,7 +211,6 @@ public class FileUtil {
*/
public static void startProcessingFile(String fileId) {
processingFileSet.add(fileId);
log.info("Processing started for file: " + fileId);
}
/**
@ -232,7 +220,6 @@ public class FileUtil {
*/
public static void endProcessingFile(String fileId) {
processingFileSet.remove(fileId);
log.info("Processing ended for file: " + fileId);
}
/**
@ -244,31 +231,4 @@ public class FileUtil {
public static boolean isProcessingFile(String fileId) {
return processingFileSet.contains(fileId);
}
/**
* Get the size of a file on disk.
*
* @param fileId the file id
* @param user the file owner
* @return the size or -1 if something went wrong
*/
public static long getFileSize(String fileId, User user) {
// To get the size we copy the decrypted content into a null output stream
// and count the copied byte size.
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
if (! Files.exists(storedFile)) {
log.debug("File does not exist " + fileId);
return File.UNKNOWN_SIZE;
}
try (InputStream fileInputStream = Files.newInputStream(storedFile);
InputStream inputStream = EncryptionUtil.decryptInputStream(fileInputStream, user.getPrivateKey());
CountingInputStream countingInputStream = new CountingInputStream(inputStream);
) {
IOUtils.copy(countingInputStream, NullOutputStream.NULL_OUTPUT_STREAM);
return countingInputStream.getByteCount();
} catch (Exception e) {
log.debug("Can't find size of file " + fileId, e);
return File.UNKNOWN_SIZE;
}
}
}
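The last block of the FileUtil hunk computes a stored file's size by decrypting it into a counting stream and discarding the bytes. A minimal sketch of that measurement, keeping the EncryptionUtil call as shown above and reducing the rest to commons-io; the EncryptionUtil package is assumed to sit next to FileUtil:

    import com.sismics.docs.core.util.EncryptionUtil;
    import org.apache.commons.io.IOUtils;
    import org.apache.commons.io.input.CountingInputStream;
    import org.apache.commons.io.output.NullOutputStream;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class FileSizeSketch {
        // Returns the decrypted size of a stored file, or -1 when it cannot be measured.
        static long measure(Path storedFile, String privateKey) {
            if (!Files.exists(storedFile)) {
                return -1L;
            }
            try (InputStream fileInputStream = Files.newInputStream(storedFile);
                 InputStream decrypted = EncryptionUtil.decryptInputStream(fileInputStream, privateKey);
                 CountingInputStream counting = new CountingInputStream(decrypted)) {
                // Copy into a sink that drops everything; only the byte count matters.
                IOUtils.copy(counting, NullOutputStream.NULL_OUTPUT_STREAM);
                return counting.getByteCount();
            } catch (Exception e) {
                return -1L;
            }
        }
    }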

View File

@ -10,9 +10,9 @@ import com.sismics.docs.core.dao.dto.MetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.docs.core.util.jpa.SortCriteria;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import java.text.MessageFormat;
import java.util.List;
import java.util.Map;

View File

@ -19,10 +19,10 @@ import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;
import java.util.List;

View File

@ -1,9 +1,8 @@
package com.sismics.docs.rest.util;
package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.sismics.docs.core.dao.dto.TagDto;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
@ -13,14 +12,14 @@ import java.util.List;
*/
public class TagUtil {
/**
* Recursively find children of a tag.
* Recursively find children of a tags.
*
* @param parentTagDto Parent tag
* @param allTagDtoList List of all tags
* @return Children tags
*/
public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) {
List<TagDto> childrenTagDtoList = new ArrayList<>();
List<TagDto> childrenTagDtoList = Lists.newArrayList();
for (TagDto tagDto : allTagDtoList) {
if (parentTagDto.getId().equals(tagDto.getParentId())) {
@ -33,17 +32,17 @@ public class TagUtil {
}
/**
* Find tags by name (start with, ignore case).
* Find tags by name (start with).
*
* @param name Name
* @param allTagDtoList List of all tags
* @return List of filtered tags
*/
public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
if (name.isEmpty()) {
return Collections.emptyList();
List<TagDto> tagDtoList = Lists.newArrayList();
if (name == null || name.isEmpty()) {
return tagDtoList;
}
List<TagDto> tagDtoList = new ArrayList<>();
name = name.toLowerCase();
for (TagDto tagDto : allTagDtoList) {
if (tagDto.getName().toLowerCase().startsWith(name)) {

View File

@ -5,8 +5,8 @@ import com.sismics.util.jpa.EMF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
/**
* Database transaction utils.

View File

@ -2,7 +2,7 @@ package com.sismics.docs.core.util.action;
import com.sismics.docs.core.dao.dto.DocumentDto;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
/**
* Base action interface.

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
import java.util.List;
import java.util.Set;

View File

@ -13,7 +13,7 @@ import com.sismics.util.context.ThreadLocalContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
import java.nio.file.Path;
import java.util.List;
@ -48,7 +48,7 @@ public class ProcessFilesAction implements Action {
FileUpdatedAsyncEvent event = new FileUpdatedAsyncEvent();
event.setUserId("admin");
event.setLanguage(documentDto.getLanguage());
event.setFileId(file.getId());
event.setFile(file);
event.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(event);
}

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
import java.util.List;
import java.util.Set;

View File

@ -4,7 +4,7 @@ import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import jakarta.json.JsonObject;
import javax.json.JsonObject;
import java.util.List;
/**

View File

@ -20,7 +20,7 @@ public class AuthenticationUtil {
.map(clazz -> {
try {
return clazz.getDeclaredConstructor().newInstance();
return clazz.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}

View File

@ -1,108 +0,0 @@
package com.sismics.docs.core.util.authentication;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.ClasspathScanner;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
/**
* LDAP authentication handler.
*
* @author bgamard
*/
@ClasspathScanner.Priority(50) // Before the internal database
public class LdapAuthenticationHandler implements AuthenticationHandler {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);
/**
* Get a LDAP connection.
* @return LdapConnection
*/
private LdapConnection getConnection() {
ConfigDao configDao = new ConfigDao();
Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
if (ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return null;
}
LdapConnectionConfig config = new LdapConnectionConfig();
config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
config.setUseSsl(ConfigUtil.getConfigBooleanValue(ConfigType.LDAP_USESSL));
config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
return new LdapNetworkConnection(config);
}
@Override
public User authenticate(String username, String password) {
// Fetch and authenticate the user
Entry userEntry;
try (LdapConnection ldapConnection = getConnection()) {
if (ldapConnection == null) {
return null;
}
ldapConnection.bind();
EntryCursor cursor = ldapConnection.search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
if (cursor.next()) {
userEntry = cursor.get();
ldapConnection.bind(userEntry.getDn(), password);
} else {
// User not found
return null;
}
} catch (Exception e) {
log.error("Error authenticating \"" + username + "\" using the LDAP", e);
return null;
}
UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
if (user == null) {
// The user is valid but never authenticated, create the user now
log.info("\"" + username + "\" authenticated for the first time, creating the internal user");
user = new User();
user.setRoleId(Constants.DEFAULT_USER_ROLE);
user.setUsername(username);
user.setPassword(UUID.randomUUID().toString()); // No authentication using the internal database
Attribute mailAttribute = userEntry.get("mail");
if (mailAttribute == null || mailAttribute.get() == null) {
user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL));
} else {
Value value = mailAttribute.get();
user.setEmail(value.getString());
}
user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));
try {
userDao.create(user, "admin");
} catch (Exception e) {
log.error("Error while creating the internal user", e);
return null;
}
}
return user;
}
}
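For reference, a stripped-down sketch of the same bind, search, and re-bind flow with the Apache Directory LDAP API; all hosts, DNs, filters and credentials below are placeholders rather than the configuration keys read above, and filter escaping is omitted:

    import org.apache.directory.api.ldap.model.cursor.EntryCursor;
    import org.apache.directory.api.ldap.model.entry.Entry;
    import org.apache.directory.api.ldap.model.message.SearchScope;
    import org.apache.directory.ldap.client.api.LdapConnection;
    import org.apache.directory.ldap.client.api.LdapNetworkConnection;

    public final class LdapSketch {
        // Returns true if the user's credentials bind successfully; placeholder values only.
        static boolean checkCredentials(String username, String password) {
            try (LdapConnection conn = new LdapNetworkConnection("ldap.example.com", 389)) {
                conn.bind("cn=admin,dc=example,dc=com", "adminSecret"); // technical bind
                EntryCursor cursor = conn.search("dc=example,dc=com",
                        "(uid=" + username + ")", SearchScope.SUBTREE);
                if (!cursor.next()) {
                    return false; // user not found
                }
                Entry userEntry = cursor.get();
                conn.bind(userEntry.getDn(), password); // re-bind as the user to verify the password
                return true;
            } catch (Exception e) {
                return false; // treat any LDAP error as an authentication failure
            }
        }
    }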

View File

@ -26,12 +26,12 @@ public class FormatHandlerUtil {
public static FormatHandler find(String mimeType) {
try {
for (Class<? extends FormatHandler> formatHandlerClass : FORMAT_HANDLERS) {
FormatHandler formatHandler = formatHandlerClass.getDeclaredConstructor().newInstance();
FormatHandler formatHandler = formatHandlerClass.newInstance();
if (formatHandler.accept(mimeType)) {
return formatHandler;
}
}
} catch (Exception e) {
} catch (InstantiationException | IllegalAccessException e) {
return null;
}

View File

@ -3,8 +3,6 @@ package com.sismics.docs.core.util.format;
import com.google.common.io.Closer;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.pdmodel.PDDocument;
@ -24,6 +22,7 @@ import java.nio.file.Path;
/**
* Image format handler.
*
* @author bgamard
*/
public class ImageFormatHandler implements FormatHandler {
/**
@ -46,7 +45,7 @@ public class ImageFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
if (language == null || !ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
if (language == null) {
return null;
}

View File

@ -2,13 +2,10 @@ package com.sismics.docs.core.util.format;
import com.google.common.io.Closer;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.multipdf.PDFMergerUtility;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.ImageType;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.apache.pdfbox.text.PDFTextStripper;
import org.slf4j.Logger;
@ -55,7 +52,7 @@ public class PdfFormatHandler implements FormatHandler {
}
// No text content, try to OCR it
if (language != null && content != null && content.trim().isEmpty() && ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
if (language != null && content != null && content.trim().isEmpty()) {
StringBuilder sb = new StringBuilder();
try (InputStream inputStream = Files.newInputStream(file);
PDDocument pdfDocument = PDDocument.load(inputStream)) {
@ -63,7 +60,7 @@ public class PdfFormatHandler implements FormatHandler {
for (int pageIndex = 0; pageIndex < pdfDocument.getNumberOfPages(); pageIndex++) {
log.info("OCR page " + (pageIndex + 1) + "/" + pdfDocument.getNumberOfPages() + " of PDF file containing only images");
sb.append(" ");
sb.append(FileUtil.ocrFile(language, renderer.renderImageWithDPI(pageIndex, 300, ImageType.GRAY)));
sb.append(FileUtil.ocrFile(language, renderer.renderImage(pageIndex)));
}
return sb.toString();
} catch (Exception e) {
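One side of this hunk renders each page at 300 DPI in grayscale via renderImageWithDPI before running OCR, the other uses the default-resolution renderImage call. A hedged sketch of that rendering loop with PDFBox 2.x, where ocr(...) is a stand-in for the project's FileUtil.ocrFile:

    import org.apache.pdfbox.pdmodel.PDDocument;
    import org.apache.pdfbox.rendering.ImageType;
    import org.apache.pdfbox.rendering.PDFRenderer;

    import java.awt.image.BufferedImage;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public final class PdfOcrSketch {
        static String extractByOcr(Path pdf) throws Exception {
            StringBuilder sb = new StringBuilder();
            try (PDDocument document = PDDocument.load(Files.newInputStream(pdf))) {
                PDFRenderer renderer = new PDFRenderer(document);
                for (int page = 0; page < document.getNumberOfPages(); page++) {
                    // 300 DPI grayscale gives the OCR engine more pixels than the default 72 DPI
                    BufferedImage image = renderer.renderImageWithDPI(page, 300, ImageType.GRAY);
                    sb.append(' ').append(ocr(image));
                }
            }
            return sb.toString();
        }

        private static String ocr(BufferedImage image) {
            return ""; // placeholder standing in for FileUtil.ocrFile / a Tesseract call
        }
    }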

View File

@ -9,7 +9,7 @@ import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.apache.poi.xslf.usermodel.XMLSlideShow;
import org.apache.poi.xslf.usermodel.XSLFSlide;
@ -50,7 +50,7 @@ public class PptxFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
XMLSlideShow pptx = loadPPtxFile(file);
return new SlideShowExtractor<>(pptx).getText();
return new XSLFPowerPointExtractor(pptx).getText();
}
@Override
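One side of this hunk extracts slide text through POI's generic SlideShowExtractor, the other through the older XSLFPowerPointExtractor. A minimal sketch of the SlideShowExtractor usage (PptxTextSketch and extractText are illustrative names):

    import org.apache.poi.sl.extractor.SlideShowExtractor;
    import org.apache.poi.xslf.usermodel.XMLSlideShow;

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public final class PptxTextSketch {
        static String extractText(Path pptx) throws Exception {
            try (InputStream in = Files.newInputStream(pptx);
                 XMLSlideShow slideShow = new XMLSlideShow(in)) {
                // SlideShowExtractor is the generic extractor covering both HSLF and XSLF slide shows
                return new SlideShowExtractor<>(slideShow).getText();
            }
        }
    }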

View File

@ -1,5 +1,6 @@
package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.io.Closer;
import com.lowagie.text.*;
import com.lowagie.text.pdf.PdfWriter;
@ -10,7 +11,6 @@ import org.apache.pdfbox.pdmodel.PDDocument;
import java.awt.image.BufferedImage;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -33,7 +33,7 @@ public class TextPlainFormatHandler implements FormatHandler {
PdfWriter.getInstance(output, pdfOutputStream);
output.open();
String content = Files.readString(file, StandardCharsets.UTF_8);
String content = new String(Files.readAllBytes(file), Charsets.UTF_8);
Font font = FontFactory.getFont("LiberationMono-Regular");
Paragraph paragraph = new Paragraph(content, font);
paragraph.setAlignment(Element.ALIGN_LEFT);
@ -46,7 +46,7 @@ public class TextPlainFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
return Files.readString(file, StandardCharsets.UTF_8);
return new String(Files.readAllBytes(file), "UTF-8");
}
@Override
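One side of this hunk reads the file with Files.readString (Java 11+), the other with Guava's Charsets plus readAllBytes. A one-method sketch of the newer call:

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public final class ReadTextSketch {
        static String read(Path file) throws Exception {
            // Java 11+: roughly equivalent to new String(Files.readAllBytes(file), StandardCharsets.UTF_8)
            return Files.readString(file, StandardCharsets.UTF_8);
        }
    }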

View File

@ -1,5 +1,6 @@
package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
@ -12,7 +13,6 @@ import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {
// Consume the data as a string
try (InputStream is = process.getInputStream()) {
return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
return new String(ByteStreams.toByteArray(is), Charsets.UTF_8);
} catch (Exception e) {
return null;
}
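Both sides of this hunk read the external tool's standard output into a string; the only difference is Guava's Charsets versus StandardCharsets. A hedged sketch of capturing process output that way (the command passed in is a placeholder, not the handler's actual media tool invocation):

    import com.google.common.io.ByteStreams;

    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public final class ProcessOutputSketch {
        static String run(String... command) throws Exception {
            Process process = new ProcessBuilder(command).redirectErrorStream(true).start();
            try (InputStream is = process.getInputStream()) {
                // read stdout (stderr is merged above) to EOF and decode as UTF-8
                return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
            }
        }
    }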

View File

@ -26,18 +26,9 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.*;
import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.*;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
@ -46,22 +37,18 @@ import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.*;
/**
* Lucene indexing handler.
@ -130,7 +117,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
} else if (luceneStorage.equals("FILE")) {
Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
log.info("Using file Lucene storage: {}", luceneDirectory);
directory = new NIOFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
}
// Create an index writer
@ -256,28 +243,34 @@ public class LuceneIndexingHandler implements IndexingHandler {
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, d.DOC_IDFILE_C, ");
sb.append(" s.count c5, ");
sb.append(" f.count c6, ");
sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
sb.append(" from T_DOCUMENT d ");
sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
" FROM T_SHARE s, T_ACL ac " +
" WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C ");
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
" left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
" FROM T_FILE f " +
" WHERE f.FIL_DELETEDATE_D IS NULL group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
sb.append(" left join (select rs.*, rs3.idDocument " +
"from T_ROUTE_STEP rs " +
"join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
"where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
// Add search criterias
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
if (criteria.getTargetIdList() != null) {
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
}
parameterMap.put("targetIdList", criteria.getTargetIdList());
}
parameterMap.put("targetIdList", criteria.getTargetIdList());
if (!Strings.isNullOrEmpty(criteria.getSimpleSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSimpleSearch(), criteria.getFullSearch());
if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSearch(), criteria.getFullSearch());
if (documentSearchMap.isEmpty()) {
// If the search doesn't find any document, the request should return nothing
documentSearchMap.put(UUID.randomUUID().toString(), null);
@ -285,7 +278,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_ID_C in :documentIdList");
parameterMap.put("documentIdList", documentSearchMap.keySet());
suggestSearchTerms(criteria.getFullSearch(), suggestionList);
suggestSearchTerms(criteria.getSearch(), suggestionList);
}
if (criteria.getCreateDateMin() != null) {
criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
@ -303,11 +296,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
}
if (!criteria.getTitleList().isEmpty()) {
criteriaList.add("d.DOC_TITLE_C in :title");
parameterMap.put("title", criteria.getTitleList());
}
if (!criteria.getTagIdList().isEmpty()) {
if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
@ -320,7 +309,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
}
}
if (!criteria.getExcludedTagIdList().isEmpty()) {
if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
@ -336,11 +325,6 @@ public class LuceneIndexingHandler implements IndexingHandler {
if (criteria.getShared() != null && criteria.getShared()) {
criteriaList.add("s.count > 0");
}
if (criteria.getMimeType() != null) {
sb.append("left join T_FILE f0 on f0.FIL_IDDOC_C = d.DOC_ID_C and f0.FIL_MIMETYPE_C = :mimeType and f0.FIL_DELETEDATE_D is null");
parameterMap.put("mimeType", criteria.getMimeType());
criteriaList.add("f0.FIL_ID_C is not null");
}
if (criteria.getLanguage() != null) {
criteriaList.add("d.DOC_LANGUAGE_C = :language");
parameterMap.put("language", criteria.getLanguage());
@ -355,8 +339,10 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_DELETEDATE_D is null");
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
@ -375,6 +361,8 @@ public class LuceneIndexingHandler implements IndexingHandler {
documentDto.setFileId((String) o[i++]);
Number shareCount = (Number) o[i++];
documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
Number fileCount = (Number) o[i++];
documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
documentDto.setActiveRoute(o[i++] != null);
documentDto.setCurrentStepName((String) o[i++]);
documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
@ -402,7 +390,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
LuceneDictionary dictionary = new LuceneDictionary(directoryReader, "title");
suggester.build(dictionary);
int lastIndex = search.lastIndexOf(' ');
String suggestQuery = search.substring(Math.max(lastIndex, 0));
String suggestQuery = search.substring(lastIndex < 0 ? 0 : lastIndex);
List<Lookup.LookupResult> lookupResultList = suggester.lookup(suggestQuery, false, 10);
for (Lookup.LookupResult lookupResult : lookupResultList) {
suggestionList.add(lookupResult.key.toString());
@ -412,14 +400,14 @@ public class LuceneIndexingHandler implements IndexingHandler {
/**
* Fulltext search in files and documents.
*
* @param simpleSearchQuery Search query on metadatas
* @param searchQuery Search query on metadatas
* @param fullSearchQuery Search query on all fields
* @return Map of document IDs as key and highlight as value
* @throws Exception e
*/
private Map<String, String> search(String simpleSearchQuery, String fullSearchQuery) throws Exception {
private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception {
// The fulltext query searches in all fields
String searchQuery = simpleSearchQuery + " " + fullSearchQuery;
searchQuery = searchQuery + " " + fullSearchQuery;
// Build search query
Analyzer analyzer = new StandardAnalyzer();
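One side of the file-storage hunk above opens the on-disk index with NIOFSDirectory, the other with SimpleFSDirectory, both with NoLockFactory. A minimal sketch of opening such a directory together with an IndexWriter (openWriter is an illustrative helper, not the handler's API):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.NIOFSDirectory;
    import org.apache.lucene.store.NoLockFactory;

    import java.nio.file.Path;

    public final class LuceneDirectorySketch {
        static IndexWriter openWriter(Path luceneDirectory) throws Exception {
            // NoLockFactory is only safe when a single process owns this index
            Directory directory = new NIOFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
            IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
            return new IndexWriter(directory, config);
        }
    }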

View File

@ -1,6 +1,6 @@
package com.sismics.docs.core.util.jpa;
import jakarta.persistence.Query;
import javax.persistence.Query;
import java.util.List;
/**
@ -68,7 +68,7 @@ public class PaginatedLists {
}
/**
* Executes a query and returns the data of the current page.
* Executes a query and returns the data of the currunt page.
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
@ -82,6 +82,18 @@ public class PaginatedLists {
q.setMaxResults(paginatedList.getLimit());
return q.getResultList();
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
* @return List of results
*/
public static <E> List<Object[]> executePaginatedQuery(PaginatedList<E> paginatedList, QueryParam queryParam) {
executeCountQuery(paginatedList, queryParam);
return executeResultQuery(paginatedList, queryParam);
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
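The added executePaginatedQuery overload runs a count query followed by a page query; the page query itself is typically plain JPA setFirstResult/setMaxResults, as the surrounding context suggests. A minimal sketch of such a page query (PaginationSketch and page are illustrative names; the jakarta import matches one side of the earlier import hunk):

    import jakarta.persistence.EntityManager;
    import jakarta.persistence.Query;

    import java.util.List;

    public final class PaginationSketch {
        @SuppressWarnings("unchecked")
        static List<Object[]> page(EntityManager em, String sql, int offset, int limit) {
            Query q = em.createNativeQuery(sql);
            q.setFirstResult(offset); // index of the first row of the page
            q.setMaxResults(limit);   // page size
            return q.getResultList();
        }
    }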

Some files were not shown because too many files have changed in this diff.