Compare commits

...

42 Commits

Author SHA1 Message Date
Alexander ADAM
c2d7f3ebc6
feat: add option to disable OCR (#768)
fixes #344
refs #767
2024-09-07 22:27:48 +02:00
Kaiyao Ke
8f1ff56d34
fixed remaining non-idempotent tests (#758) 2024-06-03 10:28:10 +02:00
Kaiyao Ke
11ae0ea7d3
fixed non-idempotent tests (#757) 2024-05-17 15:37:43 +02:00
bgamard
afa78857f9 fix test 2024-04-03 22:03:52 +02:00
bgamard
ae2423b2e9 remove check_username route 2024-04-03 21:53:00 +02:00
bgamard
01d3e746d8 cleanup Dockerfile + upgrade to jetty 11.0.20 2024-03-01 22:06:50 +01:00
Sukalpo Mitra
13cd03a762
Self contained Teedy Dockerfile (#745) 2024-03-01 21:46:17 +01:00
bgamard
ac7b3c4eb9 Merge remote-tracking branch 'origin/master' 2024-02-19 18:34:02 +01:00
bgamard
7effbc8de0 list display mode in the share app 2024-02-19 18:33:56 +01:00
Sukalpo Mitra
8c5f0c78e7
Added support for JWT based authentication (#739) 2023-12-02 17:57:51 +01:00
Erich Mauerböck
45e00ac93d
add explicit binding (#735)
* add explicit binding

* fixup building on windows

* reactivate unit test

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:58:11 +01:00
Erich Mauerböck
80454afc0d
fix unit test (#736)
Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:57:33 +01:00
Erich Mauerböck
428e898a7a
allow hyphen in username (#731)
* allow hyphen in username

* remove extra escaping

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-03 15:36:02 +01:00
Julien Kirch
13762eb67f
Upgrade pdfbox version to 2.0.29 (#728) 2023-10-20 15:41:45 +02:00
Julien Kirch
04c43ebf7b
Specify document search parameter as HTTP params (#722) 2023-10-19 18:34:04 +02:00
Julien Kirch
f9b5a5212d
Allow to specify a pool size (#727) 2023-10-09 14:05:13 +02:00
Julien Kirch
0351f94761
Upgrade Hibernate version (#726) 2023-10-09 12:36:53 +02:00
Julien Kirch
a89543b555
Make search for documents faster for large dataset (#698) 2023-10-08 22:07:01 +02:00
Benjamin Gamard
ce30b1a6ff
fix build 2023-09-15 22:05:04 +02:00
Orland Karamani
1b382004cb
Albanian Language Support (#719)
Co-authored-by: Orlando Karamani <orlandothemover@gmail.com>
2023-09-14 16:51:11 +02:00
Julien Kirch
ab7ff25929
Store file size in DB (#704) 2023-09-14 16:50:39 +02:00
Julien Kirch
eedf19ad9d
Fix no favicon on shares #580 (#718) 2023-09-08 15:43:35 +02:00
Julien Kirch
941ace99c6
Fix typo in /file/:id/versions description (#717) 2023-09-07 16:46:43 +02:00
bgamard
95e0b870f6 Merge remote-tracking branch 'origin/master' 2023-06-29 21:33:12 +02:00
bgamard
2bdb2dc34f #678: reopen ldap connection for each login 2023-06-29 21:33:05 +02:00
Julien Kirch
22a44d0c8d
Finding several documents by their title in a single query (#696) 2023-06-06 21:31:01 +02:00
Julien Kirch
a9cdbdc03e
Add missing french translations (#694) 2023-06-05 16:02:55 +02:00
Julien Kirch
3fd5470eae
Add mention in the API doc that document endpoint returns the document's metadata (#695) 2023-06-04 21:49:36 +02:00
39f96cbd28
Update config.properties (#693)
fix db version to reflect the most recent
2023-06-04 21:48:55 +02:00
4501f10429
fix comma to make valid language de.json file again 2023-05-07 11:56:34 +02:00
bd0cde7e87
Add support for STARTTLS for Inbox Scanning (#682) 2023-04-25 18:27:46 +02:00
bgamard
dd36e08d7d #680: warning when using H2 database 2023-04-22 00:47:01 +02:00
Jose Luis Montes Jiménez
4634def93e
updating README.md (#681)
The H2 database should only be used for testing; the docker-compose example now uses PostgreSQL as the default.
2023-04-22 00:12:48 +02:00
bgamard
1974a8bb8d #668: cleanup hibernate dependencies 2023-04-12 17:58:51 +02:00
bgamard
e9a6609593 #668: jetty 11 deployment 2023-04-12 13:35:54 +02:00
bgamard
b20577026e Closes #668: upgrade jetty/servlet-api/jersey 2023-04-09 21:31:53 +02:00
bgamard
dae9e137f7 Merge remote-tracking branch 'origin/master' 2023-03-22 10:23:18 +01:00
bgamard
1509d0c5bb revert h2 upgrade 2023-03-22 10:23:11 +01:00
430ebbd1c5
support ldaps (#670) 2023-03-21 21:56:14 +01:00
bgamard
b561eaee6d portuguese translation 2023-03-20 20:20:52 +01:00
bgamard
1aa21c3762 bump dependencies 2023-03-19 14:28:22 +01:00
bgamard
c8a67177d8 next dev iteration 2023-03-12 14:11:52 +01:00
190 changed files with 4861 additions and 1542 deletions

View File

@ -19,9 +19,9 @@ jobs:
distribution: "temurin"
cache: maven
- name: Install test dependencies
run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
run: sudo apt-get update && sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
- name: Build with Maven
run: mvn -Pprod clean install
run: mvn --batch-mode -Pprod clean install
- name: Upload war artifact
uses: actions/upload-artifact@v2
with:

View File

@ -1,8 +1,21 @@
FROM sismics/ubuntu-jetty:9.4.51
FROM ubuntu:22.04
LABEL maintainer="b.gamard@sismics.com"
# Run Debian in non interactive mode
ENV DEBIAN_FRONTEND noninteractive
# Configure env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV JAVA_HOME /usr/lib/jvm/java-11-openjdk-amd64/
ENV JAVA_OPTIONS -Dfile.encoding=UTF-8 -Xmx1g
ENV JETTY_VERSION 11.0.20
ENV JETTY_HOME /opt/jetty
# Install packages
RUN apt-get update && \
apt-get -y -q --no-install-recommends install \
vim less procps unzip wget tzdata openjdk-11-jdk \
ffmpeg \
mediainfo \
tesseract-ocr \
@ -31,13 +44,32 @@ RUN apt-get update && \
tesseract-ocr-tha \
tesseract-ocr-tur \
tesseract-ocr-ukr \
tesseract-ocr-vie && \
apt-get clean && rm -rf /var/lib/apt/lists/*
tesseract-ocr-vie \
tesseract-ocr-sqi \
&& apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN dpkg-reconfigure -f noninteractive tzdata
# Remove the embedded javax.mail jar from Jetty
RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
# Install Jetty
RUN wget -nv -O /tmp/jetty.tar.gz \
"https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-home/${JETTY_VERSION}/jetty-home-${JETTY_VERSION}.tar.gz" \
&& tar xzf /tmp/jetty.tar.gz -C /opt \
&& mv /opt/jetty* /opt/jetty \
&& useradd jetty -U -s /bin/false \
&& chown -R jetty:jetty /opt/jetty \
&& mkdir /opt/jetty/webapps \
&& chmod +x /opt/jetty/bin/jetty.sh
ADD docs.xml /opt/jetty/webapps/docs.xml
ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
EXPOSE 8080
ENV JAVA_OPTIONS -Xmx1g
# Install app
RUN mkdir /app && \
cd /app && \
java -jar /opt/jetty/start.jar --add-modules=server,http,webapp,deploy
ADD docs.xml /app/webapps/docs.xml
ADD docs-web/target/docs-web-*.war /app/webapps/docs.war
WORKDIR /app
CMD ["java", "-jar", "/opt/jetty/start.jar"]

View File

@ -3,6 +3,7 @@
</h3>
[![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
[![Maven CI/CD](https://github.com/sismics/docs/actions/workflows/build-deploy.yml/badge.svg)](https://github.com/sismics/docs/actions/workflows/build-deploy.yml)
Teedy is an open source, lightweight document management system for individuals and businesses.
@ -54,7 +55,7 @@ A demo is available at [demo.teedy.io](https://demo.teedy.io)
# Install with Docker
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. If no PostgreSQL configuration is provided, an embedded H2 database is used. The embedded H2 database should only be used for testing; for production, use the provided PostgreSQL configuration (see the Docker Compose example).
**The default admin password is "admin". Don't forget to change it before going to production.**
@ -80,6 +81,7 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
- `DATABASE_URL`: The jdbc connection string to be used by `hibernate`.
- `DATABASE_USER`: The user which should be used for the database connection.
- `DATABASE_PASSWORD`: The password to be used for the database connection.
- `DATABASE_POOL_SIZE`: The pool size to be used for the database connection.
- Language
- `DOCS_DEFAULT_LANGUAGE`: The language which will be used as default. Currently supported values are:
@ -95,30 +97,8 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords.
### Using the internal database
```yaml
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.11
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
volumes:
- ./docs/data:/data
```
### Using PostgreSQL
### Default, using PostgreSQL
```yaml
version: '3'
@ -143,6 +123,7 @@ services:
DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
DATABASE_USER: "teedy_db_user"
DATABASE_PASSWORD: "teedy_db_password"
DATABASE_POOL_SIZE: "10"
volumes:
- ./docs/data:/data
networks:
@ -176,6 +157,29 @@ networks:
driver: bridge
```
### Using the internal database (only for testing)
```yaml
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.11
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
volumes:
- ./docs/data:/data
```
# Manual installation
## Requirements

View File

@ -5,8 +5,8 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.11</version>
<relativePath>..</relativePath>
<version>1.12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -17,20 +17,10 @@
<dependencies>
<!-- Persistence layer dependencies -->
<dependency>
<groupId>org.hibernate</groupId>
<groupId>org.hibernate.orm</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>joda-time</groupId>
@ -48,8 +38,8 @@
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
@ -63,8 +53,8 @@
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.json</artifactId>
<groupId>jakarta.json</groupId>
<artifactId>jakarta.json-api</artifactId>
</dependency>
<dependency>
@ -122,11 +112,6 @@
<artifactId>lucene-highlighter</artifactId>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
@ -134,7 +119,12 @@
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-all</artifactId>
<artifactId>api-ldap-client-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-standalone</artifactId>
</dependency>
<!-- Only there to read old index and rebuild them -->
@ -195,25 +185,6 @@
<artifactId>postgresql</artifactId>
</dependency>
<!-- JDK 11 JAXB dependencies -->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb-core</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb-impl</artifactId>
<version>2.3.0</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>

View File

@ -20,6 +20,11 @@ public enum ConfigType {
*/
GUEST_LOGIN,
/**
* OCR enabled.
*/
OCR_ENABLED,
/**
* Default language.
*/
@ -40,6 +45,7 @@ public enum ConfigType {
INBOX_ENABLED,
INBOX_HOSTNAME,
INBOX_PORT,
INBOX_STARTTLS,
INBOX_USERNAME,
INBOX_PASSWORD,
INBOX_FOLDER,
@ -53,6 +59,7 @@ public enum ConfigType {
LDAP_ENABLED,
LDAP_HOST,
LDAP_PORT,
LDAP_USESSL,
LDAP_ADMIN_DN,
LDAP_ADMIN_PASSWORD,
LDAP_BASE_DN,

View File

@ -43,7 +43,7 @@ public class Constants {
/**
* Supported document languages.
*/
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces", "sqi");
/**
* Base URL environment variable.

View File

@ -10,8 +10,8 @@ import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.SecurityUtil;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

View File

@ -12,7 +12,7 @@ import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import jakarta.persistence.EntityManager;
import java.sql.Timestamp;
import java.util.*;

View File

@ -4,8 +4,8 @@ import com.sismics.docs.core.model.jpa.AuthenticationToken;
import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.Date;
import java.util.List;
import java.util.UUID;

View File

@ -6,9 +6,9 @@ import com.sismics.docs.core.model.jpa.Comment;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;

View File

@ -4,8 +4,8 @@ import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
/**
* Configuration parameter DAO.

View File

@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.ContributorDto;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

View File

@ -7,10 +7,10 @@ import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
@ -87,7 +87,7 @@ public class DocumentDao {
}
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, ");
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, d.DOC_IDFILE_C,");
sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) shareCount, ");
sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) fileCount, ");
sb.append(" u.USE_USERNAME_C ");
@ -121,6 +121,7 @@ public class DocumentDao {
documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setUpdateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setLanguage((String) o[i++]);
documentDto.setFileId((String) o[i++]);
documentDto.setShared(((Number) o[i++]).intValue() > 0);
documentDto.setFileCount(((Number) o[i++]).intValue());
documentDto.setCreator((String) o[i]);

View File

@ -5,8 +5,8 @@ import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

View File

@ -4,13 +4,16 @@ import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
@ -160,6 +163,7 @@ public class FileDao {
fileDb.setMimeType(file.getMimeType());
fileDb.setVersionId(file.getVersionId());
fileDb.setLatestVersion(file.isLatestVersion());
fileDb.setSize(file.getSize());
return file;
}
@ -212,6 +216,24 @@ public class FileDao {
return q.getResultList();
}
/**
* Get files count by documents IDs.
*
* @param documentIds Documents IDs
* @return the number of files per document id
*/
public Map<String, Long> countByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f.documentId, count(*) from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null group by (f.documentId)");
q.setParameter("documentIds", documentIds);
Map<String, Long> result = new HashMap<>();
q.getResultList().forEach(o -> {
Object[] resultLine = (Object[]) o;
result.put((String) resultLine[0], (Long) resultLine[1]);
});
return result;
}
/**
* Get all files from a version.
*
@ -224,4 +246,12 @@ public class FileDao {
q.setParameter("versionId", versionId);
return q.getResultList();
}
public List<File> getFilesWithUnknownSize(int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.size = :size and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("size", File.UNKNOWN_SIZE);
q.setMaxResults(limit);
return q.getResultList();
}
}
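A hypothetical usage sketch for the batched count added above (the caller class and variable names are illustrative, not part of this changeset): fetching the latest-version file counts for a whole list of documents in one grouped query instead of issuing one count query per document.

```java
import com.sismics.docs.core.dao.FileDao;

import java.util.List;
import java.util.Map;

// Sketch: one grouped query returns the latest-version file count per document;
// documents with no matching files are simply absent from the returned map.
public class FileCountExample {
    public static void printFileCounts(List<String> documentIdList) {
        FileDao fileDao = new FileDao();
        Map<String, Long> countByDocumentId = fileDao.countByDocumentsIds(documentIdList);
        for (String documentId : documentIdList) {
            long fileCount = countByDocumentId.getOrDefault(documentId, 0L);
            System.out.println(documentId + ": " + fileCount + " file(s)");
        }
    }
}
```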

View File

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.util.*;
/**

View File

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.util.*;
/**

View File

@ -6,9 +6,9 @@ import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import org.joda.time.DurationFieldType;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.util.Date;
import java.util.UUID;

View File

@ -4,8 +4,8 @@ import com.sismics.docs.core.dao.dto.RelationDto;
import com.sismics.docs.core.model.jpa.Relation;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.*;
/**

View File

@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
import com.google.common.collect.Sets;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.Set;
/**

View File

@ -11,7 +11,7 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import jakarta.persistence.EntityManager;
import java.sql.Timestamp;
import java.util.*;

View File

@ -12,9 +12,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.sql.Timestamp;
import java.util.*;

View File

@ -12,8 +12,8 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.sql.Timestamp;
import java.util.*;

View File

@ -3,8 +3,8 @@ package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Share;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.Date;
import java.util.UUID;

View File

@ -13,9 +13,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.util.*;
/**

View File

@ -19,9 +19,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.sql.Timestamp;
import java.util.*;

View File

@ -3,9 +3,9 @@ package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.util.List;
import java.util.UUID;

View File

@ -9,9 +9,9 @@ import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import java.sql.Timestamp;
import java.util.*;

View File

@ -19,7 +19,7 @@ public class DocumentCriteria {
/**
* Search query.
*/
private String search;
private String simpleSearch;
/**
* Full content search query.
@ -84,9 +84,9 @@ public class DocumentCriteria {
private String mimeType;
/**
* The title.
* Titles to include.
*/
private String title;
private List<String> titleList = new ArrayList<>();
public List<String> getTargetIdList() {
return targetIdList;
@ -96,12 +96,12 @@ public class DocumentCriteria {
this.targetIdList = targetIdList;
}
public String getSearch() {
return search;
public String getSimpleSearch() {
return simpleSearch;
}
public void setSearch(String search) {
this.search = search;
public void setSimpleSearch(String search) {
this.simpleSearch = search;
}
public String getFullSearch() {
@ -192,11 +192,7 @@ public class DocumentCriteria {
this.mimeType = mimeType;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
public List<String> getTitleList() {
return titleList;
}
}

View File

@ -3,8 +3,8 @@ package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.RouteStepType;
import com.sismics.util.JsonUtil;
import javax.json.Json;
import javax.json.JsonObjectBuilder;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
/**
* Route step DTO.

View File

@ -13,6 +13,8 @@ public class FileDeletedAsyncEvent extends UserEvent {
*/
private String fileId;
private Long fileSize;
public String getFileId() {
return fileId;
}
@ -21,10 +23,19 @@ public class FileDeletedAsyncEvent extends UserEvent {
this.fileId = fileId;
}
public Long getFileSize() {
return fileSize;
}
public void setFileSize(Long fileSize) {
this.fileSize = fileSize;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("fileId", fileId)
.add("fileSize", fileSize)
.toString();
}
}

View File

@ -2,8 +2,11 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
@ -32,6 +35,24 @@ public class FileDeletedAsyncListener {
if (log.isInfoEnabled()) {
log.info("File deleted event: " + event.toString());
}
TransactionUtil.handle(() -> {
// Update the user quota
UserDao userDao = new UserDao();
User user = userDao.getById(event.getUserId());
if (user != null) {
Long fileSize = event.getFileSize();
if (fileSize.equals(File.UNKNOWN_SIZE)) {
// The file size was not in the database; in this case we need to compute it from the unencrypted content.
fileSize = FileUtil.getFileSize(event.getFileId(), user);
}
if (! fileSize.equals(File.UNKNOWN_SIZE)) {
user.setStorageCurrent(user.getStorageCurrent() - fileSize);
userDao.updateQuota(user);
}
}
});
// Delete the file from storage
FileUtil.delete(event.getFileId());

View File

@ -9,6 +9,7 @@ import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.FileService;
import com.sismics.docs.core.service.FileSizeService;
import com.sismics.docs.core.service.InboxService;
import com.sismics.docs.core.util.PdfUtil;
import com.sismics.docs.core.util.indexing.IndexingHandler;
@ -65,6 +66,11 @@ public class AppContext {
*/
private FileService fileService;
/**
* File size service.
*/
private FileSizeService fileSizeService;
/**
* Asynchronous executors.
*/
@ -102,6 +108,11 @@ public class AppContext {
inboxService.startAsync();
inboxService.awaitRunning();
// Start file size service
fileSizeService = new FileSizeService();
fileSizeService.startAsync();
fileSizeService.awaitRunning();
// Register fonts
PdfUtil.registerFonts();
@ -238,6 +249,10 @@ public class AppContext {
fileService.stopAsync();
}
if (fileSizeService != null) {
fileSizeService.stopAsync();
}
instance = null;
}
}

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import javax.persistence.*;
import jakarta.persistence.*;
import java.util.Date;
/**

View File

@ -2,12 +2,12 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.AuditLogType;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,11 +1,11 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.ConfigType;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.io.Serializable;
/**

View File

@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.sismics.util.mime.MimeTypeUtil;
import javax.persistence.*;
import jakarta.persistence.*;
import java.util.Date;
/**
@ -88,6 +88,14 @@ public class File implements Loggable {
@Column(name = "FIL_LATESTVERSION_B", nullable = false)
private boolean latestVersion;
public static final Long UNKNOWN_SIZE = -1L;
/**
* Can be {@link File#UNKNOWN_SIZE} if the size has not been stored in the database when the file has been uploaded
*/
@Column(name = "FIL_SIZE_N", nullable = false)
private Long size;
/**
* Private key to decrypt the file.
* Not saved to database, of course.
@ -204,6 +212,18 @@ public class File implements Loggable {
return this;
}
/**
* Can return {@link File#UNKNOWN_SIZE} if the file size is not stored in the database.
*/
public Long getSize() {
return size;
}
public File setSize(Long size) {
this.size = size;
return this;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.MetadataType;
import javax.persistence.*;
import jakarta.persistence.*;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.Date;
/**

View File

@ -4,7 +4,7 @@ import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.RouteStepTransition;
import com.sismics.docs.core.constant.RouteStepType;
import javax.persistence.*;
import jakarta.persistence.*;
import java.util.Date;
/**

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.util.Date;
/**

View File

@ -3,10 +3,10 @@ package com.sismics.docs.core.model.jpa;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import com.google.common.base.MoreObjects;

View File

@ -3,7 +3,7 @@ package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.WebhookEvent;
import javax.persistence.*;
import jakarta.persistence.*;
import java.util.Date;
/**

View File

@ -0,0 +1,78 @@
package com.sismics.docs.core.service;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Service that retrieves file sizes when they are not in the database.
*/
public class FileSizeService extends AbstractScheduledService {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileSizeService.class);
public FileSizeService() {
}
@Override
protected void startUp() {
log.info("File size service starting up");
}
@Override
protected void shutDown() {
log.info("File size service shutting down");
}
private static final int BATCH_SIZE = 30;
@Override
protected void runOneIteration() {
try {
TransactionUtil.handle(() -> {
FileDao fileDao = new FileDao();
List<File> files = fileDao.getFilesWithUnknownSize(BATCH_SIZE);
for(File file : files) {
processFile(file);
}
if(files.size() < BATCH_SIZE) {
log.info("No more file to process, stopping the service");
stopAsync();
}
});
} catch (Throwable e) {
log.error("Exception during file service iteration", e);
}
}
void processFile(File file) {
UserDao userDao = new UserDao();
User user = userDao.getById(file.getUserId());
if(user == null) {
return;
}
long fileSize = FileUtil.getFileSize(file.getId(), user);
if(fileSize != File.UNKNOWN_SIZE){
FileDao fileDao = new FileDao();
file.setSize(fileSize);
fileDao.update(file);
}
}
@Override
protected Scheduler scheduler() {
return Scheduler.newFixedDelaySchedule(0, 1, TimeUnit.MINUTES);
}
}

View File

@ -1,14 +1,11 @@
package com.sismics.docs.core.service;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.ConfigUtil;
@ -18,7 +15,7 @@ import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.EmailUtil;
import com.sismics.util.context.ThreadLocalContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -159,6 +156,7 @@ public class InboxService extends AbstractScheduledService {
String port = ConfigUtil.getConfigStringValue(ConfigType.INBOX_PORT);
properties.put("mail.imap.host", ConfigUtil.getConfigStringValue(ConfigType.INBOX_HOSTNAME));
properties.put("mail.imap.port", port);
properties.setProperty("mail.imap.starttls.enable", ConfigUtil.getConfigStringValue(ConfigType.INBOX_STARTTLS).toString());
boolean isSsl = "993".equals(port);
properties.put("mail.imap.ssl.enable", String.valueOf(isSsl));
properties.setProperty("mail.imap.socketFactory.class",

View File

@ -9,7 +9,7 @@ import com.sismics.docs.core.util.action.RemoveTagAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
/**
* Action utilities.

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.model.jpa.AuditLog;
import com.sismics.docs.core.model.jpa.Loggable;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import jakarta.persistence.EntityManager;
/**
* Audit log utilities.

View File

@ -9,7 +9,6 @@ import java.util.ResourceBundle;
/**
* Configuration parameter utilities.
*
* @author jtremeaux
*/
public class ConfigUtil {
/**
@ -72,7 +71,21 @@ public class ConfigUtil {
*/
public static boolean getConfigBooleanValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Boolean.parseBoolean(value);
}
/**
* Returns the boolean value of a configuration parameter with a default value.
*
* @param configType Type of the configuration parameter
* @param defaultValue Default value to return if the configuration parameter is undefined
* @return Boolean value of the configuration parameter
*/
public static boolean getConfigBooleanValue(ConfigType configType, boolean defaultValue) {
try {
return getConfigBooleanValue(configType);
} catch (IllegalStateException e) {
return defaultValue;
}
}
}
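The two-argument overload above lets settings introduced by this changeset default safely when the parameter has never been written to the database. A minimal sketch (illustrative only, reusing `ConfigType.OCR_ENABLED` and `ConfigUtil` from this diff) of how a caller keeps OCR enabled unless it is explicitly turned off:

```java
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.util.ConfigUtil;

// Sketch: the single-argument getConfigBooleanValue propagates the
// IllegalStateException raised when the parameter is undefined; the overload
// with a default catches it and falls back, so OCR stays on for existing
// installations that never set OCR_ENABLED.
public class OcrToggleExample {
    public static boolean isOcrEnabled() {
        return ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true);
    }
}
```

This is the same call the image and PDF format handlers make further down before attempting OCR.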

View File

@ -5,7 +5,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import com.sismics.util.EnvironmentUtil;

View File

@ -16,7 +16,10 @@ import com.sismics.util.Scalr;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.CountingInputStream;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -46,7 +49,7 @@ public class FileUtil {
/**
* File ID of files currently being processed.
*/
private static Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
private static final Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
/**
* Optical character recognition on an image.
@ -149,6 +152,7 @@ public class FileUtil {
file.setName(StringUtils.abbreviate(name, 200));
file.setMimeType(mimeType);
file.setUserId(userId);
file.setSize(fileSize);
// Get files of this document
FileDao fileDao = new FileDao();
@ -240,4 +244,31 @@ public class FileUtil {
public static boolean isProcessingFile(String fileId) {
return processingFileSet.contains(fileId);
}
/**
* Get the size of a file on disk.
*
* @param fileId the file id
* @param user the file owner
* @return the size or -1 if something went wrong
*/
public static long getFileSize(String fileId, User user) {
// To get the size we copy the decrypted content into a null output stream
// and count the copied byte size.
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
if (! Files.exists(storedFile)) {
log.debug("File does not exist " + fileId);
return File.UNKNOWN_SIZE;
}
try (InputStream fileInputStream = Files.newInputStream(storedFile);
InputStream inputStream = EncryptionUtil.decryptInputStream(fileInputStream, user.getPrivateKey());
CountingInputStream countingInputStream = new CountingInputStream(inputStream);
) {
IOUtils.copy(countingInputStream, NullOutputStream.NULL_OUTPUT_STREAM);
return countingInputStream.getByteCount();
} catch (Exception e) {
log.debug("Can't find size of file " + fileId, e);
return File.UNKNOWN_SIZE;
}
}
}

View File

@ -10,9 +10,9 @@ import com.sismics.docs.core.dao.dto.MetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.docs.core.util.jpa.SortCriteria;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
import java.text.MessageFormat;
import java.util.List;
import java.util.Map;

View File

@ -19,10 +19,10 @@ import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.util.context.ThreadLocalContext;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;
import java.io.StringReader;
import java.util.List;

View File

@ -5,8 +5,8 @@ import com.sismics.util.jpa.EMF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
/**
* Database transaction utils.

View File

@ -2,7 +2,7 @@ package com.sismics.docs.core.util.action;
import com.sismics.docs.core.dao.dto.DocumentDto;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
/**
* Base action interface.

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
import java.util.List;
import java.util.Set;

View File

@ -13,7 +13,7 @@ import com.sismics.util.context.ThreadLocalContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
import java.nio.file.Path;
import java.util.List;

View File

@ -6,7 +6,7 @@ import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
import java.util.List;
import java.util.Set;

View File

@ -4,7 +4,7 @@ import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import jakarta.json.JsonObject;
import java.util.List;
/**

View File

@ -13,10 +13,9 @@ import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.DefaultLdapConnectionFactory;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapConnectionPool;
import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -35,59 +34,41 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);
/**
* LDAP connection pool.
* Get an LDAP connection.
* @return LdapConnection
*/
private static LdapConnectionPool pool;
/**
* Reset the LDAP pool.
*/
public static void reset() {
if (pool != null) {
try {
pool.close();
} catch (Exception e) {
// NOP
}
}
pool = null;
}
/**
* Initialize the LDAP pool.
*/
private static void init() {
private LdapConnection getConnection() {
ConfigDao configDao = new ConfigDao();
Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
if (pool != null || ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return;
if (ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return null;
}
LdapConnectionConfig config = new LdapConnectionConfig();
config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
config.setUseSsl(ConfigUtil.getConfigBooleanValue(ConfigType.LDAP_USESSL));
config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), null);
return new LdapNetworkConnection(config);
}
@Override
public User authenticate(String username, String password) {
init();
if (pool == null) {
return null;
}
// Fetch and authenticate the user
Entry userEntry;
try {
EntryCursor cursor = pool.getConnection().search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
try (LdapConnection ldapConnection = getConnection()) {
if (ldapConnection == null) {
return null;
}
ldapConnection.bind();
EntryCursor cursor = ldapConnection.search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
if (cursor.next()) {
userEntry = cursor.get();
pool.getConnection().bind(userEntry.getDn(), password);
ldapConnection.bind(userEntry.getDn(), password);
} else {
// User not found
return null;

View File

@ -3,7 +3,6 @@ package com.sismics.docs.core.util.format;
import com.google.common.collect.Lists;
import com.sismics.util.ClasspathScanner;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
/**

View File

@ -3,6 +3,8 @@ package com.sismics.docs.core.util.format;
import com.google.common.io.Closer;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.pdmodel.PDDocument;
@ -22,7 +24,6 @@ import java.nio.file.Path;
/**
* Image format handler.
*
* @author bgamard
*/
public class ImageFormatHandler implements FormatHandler {
/**
@ -45,7 +46,7 @@ public class ImageFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
if (language == null) {
if (language == null || !ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
return null;
}

View File

@ -2,6 +2,8 @@ package com.sismics.docs.core.util.format;
import com.google.common.io.Closer;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.multipdf.PDFMergerUtility;
@ -53,7 +55,7 @@ public class PdfFormatHandler implements FormatHandler {
}
// No text content, try to OCR it
if (language != null && content != null && content.trim().isEmpty()) {
if (language != null && content != null && content.trim().isEmpty() && ConfigUtil.getConfigBooleanValue(ConfigType.OCR_ENABLED, true)) {
StringBuilder sb = new StringBuilder();
try (InputStream inputStream = Files.newInputStream(file);
PDDocument pdfDocument = PDDocument.load(inputStream)) {

View File

@ -1,6 +1,5 @@
package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.io.Closer;
import com.lowagie.text.*;
import com.lowagie.text.pdf.PdfWriter;

View File

@ -26,9 +26,18 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
@ -47,7 +56,12 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Timestamp;
import java.util.*;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Lucene indexing handler.
@ -242,34 +256,28 @@ public class LuceneIndexingHandler implements IndexingHandler {
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, d.DOC_IDFILE_C, ");
sb.append(" s.count c5, ");
sb.append(" f.count c6, ");
sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
sb.append(" from T_DOCUMENT d ");
sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
" FROM T_SHARE s, T_ACL ac " +
" WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
" left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
" FROM T_FILE f " +
" WHERE f.FIL_DELETEDATE_D is null group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C ");
sb.append(" left join (select rs.*, rs3.idDocument " +
"from T_ROUTE_STEP rs " +
"join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
"where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
// Add search criterias
if (criteria.getTargetIdList() != null) {
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
}
parameterMap.put("targetIdList", criteria.getTargetIdList());
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
}
if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSearch(), criteria.getFullSearch());
parameterMap.put("targetIdList", criteria.getTargetIdList());
if (!Strings.isNullOrEmpty(criteria.getSimpleSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSimpleSearch(), criteria.getFullSearch());
if (documentSearchMap.isEmpty()) {
// If the search doesn't find any document, the request should return nothing
documentSearchMap.put(UUID.randomUUID().toString(), null);
@ -295,9 +303,9 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
}
if (criteria.getTitle() != null) {
criteriaList.add("d.DOC_TITLE_C = :title");
parameterMap.put("title", criteria.getTitle());
if (!criteria.getTitleList().isEmpty()) {
criteriaList.add("d.DOC_TITLE_C in :title");
parameterMap.put("title", criteria.getTitleList());
}
if (!criteria.getTagIdList().isEmpty()) {
int index = 0;
@ -312,7 +320,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
}
}
if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
if (!criteria.getExcludedTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
@ -347,10 +355,8 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
@ -369,8 +375,6 @@ public class LuceneIndexingHandler implements IndexingHandler {
documentDto.setFileId((String) o[i++]);
Number shareCount = (Number) o[i++];
documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
Number fileCount = (Number) o[i++];
documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
documentDto.setActiveRoute(o[i++] != null);
documentDto.setCurrentStepName((String) o[i++]);
documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
@ -408,14 +412,14 @@ public class LuceneIndexingHandler implements IndexingHandler {
/**
* Fulltext search in files and documents.
*
* @param searchQuery Search query on metadatas
* @param simpleSearchQuery Search query on metadatas
* @param fullSearchQuery Search query on all fields
* @return Map of document IDs as key and highlight as value
* @throws Exception e
*/
private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception {
private Map<String, String> search(String simpleSearchQuery, String fullSearchQuery) throws Exception {
// The fulltext query searches in all fields
searchQuery = searchQuery + " " + fullSearchQuery;
String searchQuery = simpleSearchQuery + " " + fullSearchQuery;
// Build search query
Analyzer analyzer = new StandardAnalyzer();

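The simple and fulltext queries are concatenated and then parsed against the indexed fields. A minimal sketch of that step, assuming an already-open IndexSearcher and illustrative field names and weights (not the handler's actual mapping):

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;

// Sketch only: parse the concatenated simple + fulltext query over weighted fields.
static TopDocs searchSketch(IndexSearcher searcher, String simpleSearchQuery, String fullSearchQuery) throws Exception {
    String searchQuery = simpleSearchQuery + " " + fullSearchQuery;
    Map<String, Float> weights = new HashMap<>();
    weights.put("title", 10.0f);   // boost title matches
    weights.put("content", 1.0f);  // plain weight for extracted file content
    SimpleQueryParser parser = new SimpleQueryParser(new StandardAnalyzer(), weights);
    Query query = parser.parse(searchQuery);
    return searcher.search(query, 100); // first 100 hits; highlighting is handled separately
}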
View File

@ -1,6 +1,6 @@
package com.sismics.docs.core.util.jpa;
import javax.persistence.Query;
import jakarta.persistence.Query;
import java.util.List;
/**
@ -68,7 +68,7 @@ public class PaginatedLists {
}
/**
* Executes a query and returns the data of the currunt page.
* Executes a query and returns the data of the current page.
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
@ -83,18 +83,6 @@ public class PaginatedLists {
return q.getResultList();
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
* @return List of results
*/
public static <E> List<Object[]> executePaginatedQuery(PaginatedList<E> paginatedList, QueryParam queryParam) {
executeCountQuery(paginatedList, queryParam);
return executeResultQuery(paginatedList, queryParam);
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
*

View File

@ -2,8 +2,8 @@ package com.sismics.docs.core.util.jpa;
import java.util.Map.Entry;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -17,9 +17,9 @@ import org.jsoup.Jsoup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Multipart;

View File

@ -1,7 +1,7 @@
package com.sismics.util;
import javax.json.Json;
import javax.json.JsonValue;
import jakarta.json.Json;
import jakarta.json.JsonValue;
/**
* JSON utilities.

View File

@ -3,7 +3,7 @@ package com.sismics.util.context;
import com.google.common.collect.Lists;
import com.sismics.docs.core.model.context.AppContext;
import javax.persistence.EntityManager;
import jakarta.persistence.EntityManager;
import java.util.Iterator;
import java.util.List;

View File

@ -182,9 +182,9 @@ abstract class DbOpenHelper {
public abstract void onUpgrade(int oldVersion, int newVersion) throws Exception;
/**
* Returns a List of all Exceptions which occured during the export.
* Returns a List of all Exceptions which occurred during the export.
*
* @return A List containig the Exceptions occured during the export
* @return A List containing the Exceptions occurred during the export
*/
public List<?> getExceptions() {
return exceptions;

View File

@ -8,8 +8,8 @@ import org.hibernate.service.ServiceRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.Persistence;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
@ -26,7 +26,7 @@ import java.util.Properties;
public final class EMF {
private static final Logger log = LoggerFactory.getLogger(EMF.class);
private static Map<Object, Object> properties;
private static Properties properties;
private static EntityManagerFactory emfInstance;
@ -59,7 +59,7 @@ public final class EMF {
}
}
private static Map<Object, Object> getEntityManagerProperties() {
private static Properties getEntityManagerProperties() {
// Use properties file if exists
try {
URL hibernatePropertiesUrl = EMF.class.getResource("/hibernate.properties");
@ -79,19 +79,24 @@ public final class EMF {
String databaseUrl = System.getenv("DATABASE_URL");
String databaseUsername = System.getenv("DATABASE_USER");
String databasePassword = System.getenv("DATABASE_PASSWORD");
String databasePoolSize = System.getenv("DATABASE_POOL_SIZE");
if(databasePoolSize == null) {
databasePoolSize = "10";
}
log.info("Configuring EntityManager from environment parameters");
Map<Object, Object> props = new HashMap<>();
Properties props = new Properties();
Path dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
if (Strings.isNullOrEmpty(databaseUrl)) {
log.warn("Using an embedded H2 database. Only suitable for testing purpose, not for production!");
props.put("hibernate.connection.driver_class", "org.h2.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");
props.put("hibernate.connection.username", "sa");
} else {
props.put("hibernate.connection.driver_class", "org.postgresql.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQL94Dialect");
props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
props.put("hibernate.connection.url", databaseUrl);
props.put("hibernate.connection.username", databaseUsername);
props.put("hibernate.connection.password", databasePassword);
@ -101,12 +106,9 @@ public final class EMF {
props.put("hibernate.format_sql", "false");
props.put("hibernate.max_fetch_depth", "5");
props.put("hibernate.cache.use_second_level_cache", "false");
props.put("hibernate.c3p0.min_size", "1");
props.put("hibernate.c3p0.max_size", "10");
props.put("hibernate.c3p0.timeout", "5000");
props.put("hibernate.c3p0.max_statements", "0");
props.put("hibernate.c3p0.acquire_increment", "1");
props.put("hibernate.c3p0.idle_test_period", "10");
props.put("hibernate.connection.initial_pool_size", "1");
props.put("hibernate.connection.pool_size", databasePoolSize);
props.put("hibernate.connection.pool_validation_interval", "5");
return props;
}

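The c3p0 settings above are replaced by Hibernate's built-in connection pool, sized from the environment. A minimal sketch of the fallback pattern, assuming the surrounding getEntityManagerProperties() context:

import java.util.Properties;

// Sketch only: size Hibernate's internal pool from DATABASE_POOL_SIZE, defaulting to 10.
// Driver, dialect and URL keys are set as in the diff above and omitted here.
Properties props = new Properties();
String databasePoolSize = System.getenv("DATABASE_POOL_SIZE");
props.put("hibernate.connection.initial_pool_size", "1");
props.put("hibernate.connection.pool_size", databasePoolSize == null ? "10" : databasePoolSize);
props.put("hibernate.connection.pool_validation_interval", "5");

With DATABASE_POOL_SIZE=25 set in the container environment the pool holds 25 connections; when the variable is unset the default of 10 applies.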
View File

@ -1,6 +1,6 @@
package com.sismics.util.log4j;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Level;
/**

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd"
version="2.0">
<persistence xmlns="https://jakarta.ee/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://jakarta.ee/xml/ns/persistence https://jakarta.ee/xml/ns/persistence/persistence_3_0.xsd"
version="3.0">
<persistence-unit name="transactions-optional" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
</persistence-unit>

View File

@ -1 +1 @@
db.version=27
db.version=31

View File

@ -0,0 +1,2 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_STARTTLS', 'false');
update T_CONFIG set CFG_VALUE_C = '28' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,2 @@
alter table T_FILE add column FIL_SIZE_N bigint not null default -1;
update T_CONFIG set CFG_VALUE_C = '29' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,2 @@
create index IDX_FIL_IDDOC_C ON T_FILE (FIL_IDDOC_C ASC);
update T_CONFIG set CFG_VALUE_C = '30' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,7 @@
-- DBUPDATE-031-0.SQL
-- Insert a new setting for OCR recognition
insert into T_CONFIG (CFG_ID_C, CFG_VALUE_C) values ('OCR_ENABLED', 'true');
-- Update the database version
update T_CONFIG set CFG_VALUE_C = '31' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,49 @@
package com.sismics;
import java.io.InputStream;
import java.net.URL;
public abstract class BaseTest {
protected static final String FILE_CSV = "document.csv";
protected static final String FILE_DOCX = "document.docx";
protected static final String FILE_GIF = "image.gif";
protected static final String FILE_JPG = "apollo_portrait.jpg";
protected static final Long FILE_JPG_SIZE = 7_907L;
protected static final String FILE_JPG2 = "apollo_landscape.jpg";
protected static final String FILE_MP4 = "video.mp4";
protected static final String FILE_ODT = "document.odt";
protected static final String FILE_PDF = "udhr.pdf";
protected static final String FILE_PDF_ENCRYPTED = "udhr_encrypted.pdf";
protected static final String FILE_PDF_SCANNED = "scanned.pdf";
protected static final String FILE_PNG = "image.png";
protected static final String FILE_PPTX = "apache.pptx";
protected static final String FILE_TXT = "document.txt";
protected static final String FILE_WEBM = "video.webm";
protected static final String FILE_XLSX = "document.xlsx";
protected static final String FILE_ZIP = "document.zip";
protected static URL getResource(String fileName) {
return ClassLoader.getSystemResource("file/" + fileName);
}
protected static InputStream getSystemResourceAsStream(String fileName) {
return ClassLoader.getSystemResourceAsStream("file/" + fileName);
}
}

View File

@ -1,21 +1,36 @@
package com.sismics.docs;
import com.sismics.BaseTest;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.EncryptionUtil;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.jpa.EMF;
import com.sismics.util.mime.MimeType;
import org.junit.After;
import org.junit.Before;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
/**
* Base class of tests with a transactional context.
*
* @author jtremeaux
*/
public abstract class BaseTransactionalTest {
public abstract class BaseTransactionalTest extends BaseTest {
@Before
public void setUp() throws Exception {
public void setUp() {
// Initialize the entity manager
EntityManager em = EMF.get().createEntityManager();
ThreadLocalContext context = ThreadLocalContext.get();
@ -25,6 +40,35 @@ public abstract class BaseTransactionalTest {
}
@After
public void tearDown() throws Exception {
public void tearDown() {
ThreadLocalContext.get().getEntityManager().getTransaction().rollback();
}
protected User createUser(String userName) throws Exception {
UserDao userDao = new UserDao();
User user = new User();
user.setUsername(userName);
user.setPassword("12345678");
user.setEmail("toto@docs.com");
user.setRoleId("admin");
user.setStorageQuota(100_000L);
userDao.create(user, userName);
return user;
}
protected File createFile(User user, long fileSize) throws Exception {
FileDao fileDao = new FileDao();
try(InputStream inputStream = getSystemResourceAsStream(FILE_JPG)) {
File file = new File();
file.setId("apollo_portrait");
file.setUserId(user.getId());
file.setVersion(0);
file.setMimeType(MimeType.IMAGE_JPEG);
file.setSize(fileSize);
String fileId = fileDao.create(file, user.getId());
Cipher cipher = EncryptionUtil.getEncryptionCipher(user.getPrivateKey());
Files.copy(new CipherInputStream(inputStream, cipher), DirectoryUtil.getStorageDirectory().resolve(fileId), REPLACE_EXISTING);
return file;
}
}
}

View File

@ -18,22 +18,20 @@ public class TestJpa extends BaseTransactionalTest {
public void testJpa() throws Exception {
// Create a user
UserDao userDao = new UserDao();
User user = new User();
user.setUsername("username");
user.setPassword("12345678");
user.setEmail("toto@docs.com");
user.setRoleId("admin");
user.setStorageQuota(10L);
String id = userDao.create(user, "me");
User user = createUser("testJpa");
TransactionUtil.commit();
// Search a user by his ID
user = userDao.getById(id);
user = userDao.getById(user.getId());
Assert.assertNotNull(user);
Assert.assertEquals("toto@docs.com", user.getEmail());
// Authenticate using the database
Assert.assertNotNull(new InternalAuthenticationHandler().authenticate("username", "12345678"));
Assert.assertNotNull(new InternalAuthenticationHandler().authenticate("testJpa", "12345678"));
// Delete the created user
userDao.delete("testJpa", user.getId());
TransactionUtil.commit();
}
}

View File

@ -0,0 +1,52 @@
package com.sismics.docs.core.listener.async;
import com.sismics.docs.BaseTransactionalTest;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.TransactionUtil;
import org.junit.Assert;
import org.junit.Test;
public class FileDeletedAsyncListenerTest extends BaseTransactionalTest {
@Test
public void updateQuotaSizeKnown() throws Exception {
User user = createUser("updateQuotaSizeKnown");
File file = createFile(user, FILE_JPG_SIZE);
UserDao userDao = new UserDao();
user = userDao.getById(user.getId());
user.setStorageCurrent(10_000L);
userDao.updateQuota(user);
FileDeletedAsyncListener fileDeletedAsyncListener = new FileDeletedAsyncListener();
TransactionUtil.commit();
FileDeletedAsyncEvent event = new FileDeletedAsyncEvent();
event.setFileSize(FILE_JPG_SIZE);
event.setFileId(file.getId());
event.setUserId(user.getId());
fileDeletedAsyncListener.on(event);
Assert.assertEquals(userDao.getById(user.getId()).getStorageCurrent(), Long.valueOf(10_000 - FILE_JPG_SIZE));
}
@Test
public void updateQuotaSizeUnknown() throws Exception {
User user = createUser("updateQuotaSizeUnknown");
File file = createFile(user, File.UNKNOWN_SIZE);
UserDao userDao = new UserDao();
user = userDao.getById(user.getId());
user.setStorageCurrent(10_000L);
userDao.updateQuota(user);
FileDeletedAsyncListener fileDeletedAsyncListener = new FileDeletedAsyncListener();
TransactionUtil.commit();
FileDeletedAsyncEvent event = new FileDeletedAsyncEvent();
event.setFileSize(FILE_JPG_SIZE);
event.setFileId(file.getId());
event.setUserId(user.getId());
fileDeletedAsyncListener.on(event);
Assert.assertEquals(userDao.getById(user.getId()).getStorageCurrent(), Long.valueOf(10_000 - FILE_JPG_SIZE));
}
}

View File

@ -0,0 +1,22 @@
package com.sismics.docs.core.service;
import com.sismics.docs.BaseTransactionalTest;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import org.junit.Assert;
import org.junit.Test;
public class TestFileSizeService extends BaseTransactionalTest {
@Test
public void processFileTest() throws Exception {
User user = createUser("processFileTest");
FileDao fileDao = new FileDao();
File file = createFile(user, File.UNKNOWN_SIZE);
FileSizeService fileSizeService = new FileSizeService();
fileSizeService.processFile(file);
Assert.assertEquals(fileDao.getFile(file.getId()).getSize(), Long.valueOf(FILE_JPG_SIZE));
}
}

View File

@ -2,6 +2,7 @@ package com.sismics.docs.core.util;
import com.google.common.base.Strings;
import com.google.common.io.ByteStreams;
import com.sismics.BaseTest;
import org.junit.Assert;
import org.junit.Test;
@ -14,7 +15,7 @@ import java.io.InputStream;
*
* @author bgamard
*/
public class TestEncryptUtil {
public class TestEncryptUtil extends BaseTest {
@Test
public void generatePrivateKeyTest() {
String key = EncryptionUtil.generatePrivateKey();
@ -31,9 +32,9 @@ public class TestEncryptUtil {
// NOP
}
Cipher cipher = EncryptionUtil.getEncryptionCipher("OnceUponATime");
InputStream inputStream = new CipherInputStream(this.getClass().getResourceAsStream("/file/udhr.pdf"), cipher);
InputStream inputStream = new CipherInputStream(getSystemResourceAsStream(FILE_PDF), cipher);
byte[] encryptedData = ByteStreams.toByteArray(inputStream);
byte[] assertData = ByteStreams.toByteArray(this.getClass().getResourceAsStream("/file/udhr_encrypted.pdf"));
byte[] assertData = ByteStreams.toByteArray(getSystemResourceAsStream(FILE_PDF_ENCRYPTED));
Assert.assertEquals(encryptedData.length, assertData.length);
}
@ -41,9 +42,9 @@ public class TestEncryptUtil {
@Test
public void decryptStreamTest() throws Exception {
InputStream inputStream = EncryptionUtil.decryptInputStream(
this.getClass().getResourceAsStream("/file/udhr_encrypted.pdf"), "OnceUponATime");
getSystemResourceAsStream(FILE_PDF_ENCRYPTED), "OnceUponATime");
byte[] encryptedData = ByteStreams.toByteArray(inputStream);
byte[] assertData = ByteStreams.toByteArray(this.getClass().getResourceAsStream("/file/udhr.pdf"));
byte[] assertData = ByteStreams.toByteArray(getSystemResourceAsStream(FILE_PDF));
Assert.assertEquals(encryptedData.length, assertData.length);
}

Some files were not shown because too many files have changed in this diff.