Compare commits

...

13 Commits

Author SHA1 Message Date
Erich Mauerböck 45e00ac93d
add explicit binding (#735)
* add explicit binding

* fixup building on windows

* reactivate unit test

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:58:11 +01:00
Erich Mauerböck 80454afc0d
fix unit test (#736)
Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-10 20:57:33 +01:00
Erich Mauerböck 428e898a7a
allow hyphen in username (#731)
* allow hyphen in username

* remove extra escaping

---------

Co-authored-by: Enrice <erich.mauerboeck@ergo-versicherung.at>
2023-11-03 15:36:02 +01:00
Julien Kirch 13762eb67f
Upgrade pdfbox version to 2.0.29 (#728) 2023-10-20 15:41:45 +02:00
Julien Kirch 04c43ebf7b
Specify document search parameter as HTTP params (#722) 2023-10-19 18:34:04 +02:00
Julien Kirch f9b5a5212d
Allow to specify a pool size (#727) 2023-10-09 14:05:13 +02:00
Julien Kirch 0351f94761
Upgrade Hibernate version (#726) 2023-10-09 12:36:53 +02:00
Julien Kirch a89543b555
Make search for documents faster for large dataset (#698) 2023-10-08 22:07:01 +02:00
Benjamin Gamard ce30b1a6ff
fix build 2023-09-15 22:05:04 +02:00
Orland Karamani 1b382004cb
Albanian Language Support (#719)
Co-authored-by: Orlando Karamani <orlandothemover@gmail.com>
2023-09-14 16:51:11 +02:00
Julien Kirch ab7ff25929
Store file size in DB (#704) 2023-09-14 16:50:39 +02:00
Julien Kirch eedf19ad9d
Fix no favicon on shares #580 (#718) 2023-09-08 15:43:35 +02:00
Julien Kirch 941ace99c6
Fix typo in /file/:id/versions description (#717) 2023-09-07 16:46:43 +02:00
67 changed files with 2688 additions and 725 deletions

View File

@ -19,7 +19,7 @@ jobs:
distribution: "temurin"
cache: maven
- name: Install test dependencies
run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
run: sudo apt-get update && sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
- name: Build with Maven
run: mvn -Pprod clean install
- name: Upload war artifact

View File

@ -31,7 +31,8 @@ RUN apt-get update && \
tesseract-ocr-tha \
tesseract-ocr-tur \
tesseract-ocr-ukr \
tesseract-ocr-vie && \
tesseract-ocr-vie \
tesseract-ocr-sqi && \
apt-get clean && rm -rf /var/lib/apt/lists/* && \
mkdir /app && \
cd /app && \

View File

@ -81,6 +81,7 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
- `DATABASE_URL`: The jdbc connection string to be used by `hibernate`.
- `DATABASE_USER`: The user which should be used for the database connection.
- `DATABASE_PASSWORD`: The password to be used for the database connection.
- `DATABASE_POOL_SIZE`: The pool size to be used for the database connection.
- Language
- `DOCS_DEFAULT_LANGUAGE`: The language which will be used as default. Currently supported values are:
@ -122,6 +123,7 @@ services:
DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
DATABASE_USER: "teedy_db_user"
DATABASE_PASSWORD: "teedy_db_password"
DATABASE_POOL_SIZE: "10"
volumes:
- ./docs/data:/data
networks:

View File

@ -8,7 +8,7 @@
<version>1.12-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>docs-core</artifactId>
<packaging>jar</packaging>
@ -17,10 +17,10 @@
<dependencies>
<!-- Persistence layer dependencies -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-jakarta</artifactId>
<groupId>org.hibernate.orm</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>joda-time</groupId>
@ -31,12 +31,12 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
@ -46,7 +46,7 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
@ -66,17 +66,17 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
@ -86,17 +86,17 @@
<groupId>at.favre.lib</groupId>
<artifactId>bcrypt</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
@ -119,7 +119,12 @@
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-all</artifactId>
<artifactId>api-ldap-client-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-standalone</artifactId>
</dependency>
<!-- Only there to read old index and rebuild them -->
@ -127,22 +132,22 @@
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</dependency>
<dependency>
<groupId>org.imgscalr</groupId>
<artifactId>imgscalr-lib</artifactId>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</dependency>
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>fr.opensagres.odfdom.converter.pdf</artifactId>
@ -186,14 +191,14 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<!-- Development profile (active by default) -->
<profile>
@ -205,7 +210,7 @@
<value>dev</value>
</property>
</activation>
<build>
<resources>
<resource>
@ -221,7 +226,7 @@
<id>prod</id>
</profile>
</profiles>
<build>
<resources>
<resource>

View File

@ -43,7 +43,7 @@ public class Constants {
/**
* Supported document languages.
*/
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces", "sqi");
/**
* Base URL environment variable.

View File

@ -87,7 +87,7 @@ public class DocumentDao {
}
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, ");
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, d.DOC_IDFILE_C,");
sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) shareCount, ");
sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) fileCount, ");
sb.append(" u.USE_USERNAME_C ");
@ -121,6 +121,7 @@ public class DocumentDao {
documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setUpdateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setLanguage((String) o[i++]);
documentDto.setFileId((String) o[i++]);
documentDto.setShared(((Number) o[i++]).intValue() > 0);
documentDto.setFileCount(((Number) o[i++]).intValue());
documentDto.setCreator((String) o[i]);

View File

@ -4,13 +4,16 @@ import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.Query;
import jakarta.persistence.TypedQuery;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
@ -160,6 +163,7 @@ public class FileDao {
fileDb.setMimeType(file.getMimeType());
fileDb.setVersionId(file.getVersionId());
fileDb.setLatestVersion(file.isLatestVersion());
fileDb.setSize(file.getSize());
return file;
}
@ -212,6 +216,24 @@ public class FileDao {
return q.getResultList();
}
/**
* Get files count by documents IDs.
*
* @param documentIds Documents IDs
* @return the number of files per document id
*/
public Map<String, Long> countByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f.documentId, count(*) from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null group by (f.documentId)");
q.setParameter("documentIds", documentIds);
Map<String, Long> result = new HashMap<>();
q.getResultList().forEach(o -> {
Object[] resultLine = (Object[]) o;
result.put((String) resultLine[0], (Long) resultLine[1]);
});
return result;
}
/**
* Get all files from a version.
*
@ -224,4 +246,12 @@ public class FileDao {
q.setParameter("versionId", versionId);
return q.getResultList();
}
public List<File> getFilesWithUnknownSize(int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.size = :size and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("size", File.UNKNOWN_SIZE);
q.setMaxResults(limit);
return q.getResultList();
}
}

View File

@ -19,7 +19,7 @@ public class DocumentCriteria {
/**
* Search query.
*/
private String search;
private String simpleSearch;
/**
* Full content search query.
@ -96,12 +96,12 @@ public class DocumentCriteria {
this.targetIdList = targetIdList;
}
public String getSearch() {
return search;
public String getSimpleSearch() {
return simpleSearch;
}
public void setSearch(String search) {
this.search = search;
public void setSimpleSearch(String search) {
this.simpleSearch = search;
}
public String getFullSearch() {

View File

@ -13,6 +13,8 @@ public class FileDeletedAsyncEvent extends UserEvent {
*/
private String fileId;
private Long fileSize;
public String getFileId() {
return fileId;
}
@ -21,10 +23,19 @@ public class FileDeletedAsyncEvent extends UserEvent {
this.fileId = fileId;
}
public Long getFileSize() {
return fileSize;
}
public void setFileSize(Long fileSize) {
this.fileSize = fileSize;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("fileId", fileId)
.add("fileSize", fileSize)
.toString();
}
}
}

View File

@ -2,8 +2,11 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
@ -11,7 +14,7 @@ import org.slf4j.LoggerFactory;
/**
* Listener on file deleted.
*
*
* @author bgamard
*/
public class FileDeletedAsyncListener {
@ -22,7 +25,7 @@ public class FileDeletedAsyncListener {
/**
* File deleted.
*
*
* @param event File deleted event
* @throws Exception e
*/
@ -32,6 +35,24 @@ public class FileDeletedAsyncListener {
if (log.isInfoEnabled()) {
log.info("File deleted event: " + event.toString());
}
TransactionUtil.handle(() -> {
// Update the user quota
UserDao userDao = new UserDao();
User user = userDao.getById(event.getUserId());
if (user != null) {
Long fileSize = event.getFileSize();
if (fileSize.equals(File.UNKNOWN_SIZE)) {
// The file size was not in the database, in this case we need to get from the unencrypted size.
fileSize = FileUtil.getFileSize(event.getFileId(), user);
}
if (! fileSize.equals(File.UNKNOWN_SIZE)) {
user.setStorageCurrent(user.getStorageCurrent() - fileSize);
userDao.updateQuota(user);
}
}
});
// Delete the file from storage
FileUtil.delete(event.getFileId());

View File

@ -9,6 +9,7 @@ import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.FileService;
import com.sismics.docs.core.service.FileSizeService;
import com.sismics.docs.core.service.InboxService;
import com.sismics.docs.core.util.PdfUtil;
import com.sismics.docs.core.util.indexing.IndexingHandler;
@ -65,6 +66,11 @@ public class AppContext {
*/
private FileService fileService;
/**
* File size service.
*/
private FileSizeService fileSizeService;
/**
* Asynchronous executors.
*/
@ -102,6 +108,11 @@ public class AppContext {
inboxService.startAsync();
inboxService.awaitRunning();
// Start file size service
fileSizeService = new FileSizeService();
fileSizeService.startAsync();
fileSizeService.awaitRunning();
// Register fonts
PdfUtil.registerFonts();
@ -238,6 +249,10 @@ public class AppContext {
fileService.stopAsync();
}
if (fileSizeService != null) {
fileSizeService.stopAsync();
}
instance = null;
}
}

View File

@ -88,6 +88,14 @@ public class File implements Loggable {
@Column(name = "FIL_LATESTVERSION_B", nullable = false)
private boolean latestVersion;
public static final Long UNKNOWN_SIZE = -1L;
/**
* Can be {@link File#UNKNOWN_SIZE} if the size has not been stored in the database when the file has been uploaded
*/
@Column(name = "FIL_SIZE_N", nullable = false)
private Long size;
/**
* Private key to decrypt the file.
* Not saved to database, of course.
@ -204,6 +212,18 @@ public class File implements Loggable {
return this;
}
/**
* Can return {@link File#UNKNOWN_SIZE} if the file size is not stored in the database.
*/
public Long getSize() {
return size;
}
public File setSize(Long size) {
this.size = size;
return this;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)

View File

@ -0,0 +1,78 @@
package com.sismics.docs.core.service;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Service that retrieve files sizes when they are not in the database.
*/
public class FileSizeService extends AbstractScheduledService {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileSizeService.class);
public FileSizeService() {
}
@Override
protected void startUp() {
log.info("File size service starting up");
}
@Override
protected void shutDown() {
log.info("File size service shutting down");
}
private static final int BATCH_SIZE = 30;
@Override
protected void runOneIteration() {
try {
TransactionUtil.handle(() -> {
FileDao fileDao = new FileDao();
List<File> files = fileDao.getFilesWithUnknownSize(BATCH_SIZE);
for(File file : files) {
processFile(file);
}
if(files.size() < BATCH_SIZE) {
log.info("No more file to process, stopping the service");
stopAsync();
}
});
} catch (Throwable e) {
log.error("Exception during file service iteration", e);
}
}
void processFile(File file) {
UserDao userDao = new UserDao();
User user = userDao.getById(file.getUserId());
if(user == null) {
return;
}
long fileSize = FileUtil.getFileSize(file.getId(), user);
if(fileSize != File.UNKNOWN_SIZE){
FileDao fileDao = new FileDao();
file.setSize(fileSize);
fileDao.update(file);
}
}
@Override
protected Scheduler scheduler() {
return Scheduler.newFixedDelaySchedule(0, 1, TimeUnit.MINUTES);
}
}

View File

@ -1,14 +1,11 @@
package com.sismics.docs.core.service;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.ConfigUtil;

View File

@ -16,6 +16,9 @@ import com.sismics.util.Scalr;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.CountingInputStream;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -46,7 +49,7 @@ public class FileUtil {
/**
* File ID of files currently being processed.
*/
private static Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
private static final Set<String> processingFileSet = Collections.synchronizedSet(new HashSet<>());
/**
* Optical character recognition on an image.
@ -149,6 +152,7 @@ public class FileUtil {
file.setName(StringUtils.abbreviate(name, 200));
file.setMimeType(mimeType);
file.setUserId(userId);
file.setSize(fileSize);
// Get files of this document
FileDao fileDao = new FileDao();
@ -240,4 +244,31 @@ public class FileUtil {
public static boolean isProcessingFile(String fileId) {
return processingFileSet.contains(fileId);
}
/**
* Get the size of a file on disk.
*
* @param fileId the file id
* @param user the file owner
* @return the size or -1 if something went wrong
*/
public static long getFileSize(String fileId, User user) {
// To get the size we copy the decrypted content into a null output stream
// and count the copied byte size.
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
if (! Files.exists(storedFile)) {
log.debug("File does not exist " + fileId);
return File.UNKNOWN_SIZE;
}
try (InputStream fileInputStream = Files.newInputStream(storedFile);
InputStream inputStream = EncryptionUtil.decryptInputStream(fileInputStream, user.getPrivateKey());
CountingInputStream countingInputStream = new CountingInputStream(inputStream);
) {
IOUtils.copy(countingInputStream, NullOutputStream.NULL_OUTPUT_STREAM);
return countingInputStream.getByteCount();
} catch (Exception e) {
log.debug("Can't find size of file " + fileId, e);
return File.UNKNOWN_SIZE;
}
}
}

View File

@ -62,6 +62,7 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
if (ldapConnection == null) {
return null;
}
ldapConnection.bind();
EntryCursor cursor = ldapConnection.search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);

View File

@ -3,7 +3,6 @@ package com.sismics.docs.core.util.format;
import com.google.common.collect.Lists;
import com.sismics.util.ClasspathScanner;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
/**

View File

@ -1,6 +1,5 @@
package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.io.Closer;
import com.lowagie.text.*;
import com.lowagie.text.pdf.PdfWriter;

View File

@ -26,9 +26,18 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
@ -47,7 +56,12 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Timestamp;
import java.util.*;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Lucene indexing handler.
@ -242,34 +256,28 @@ public class LuceneIndexingHandler implements IndexingHandler {
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, d.DOC_IDFILE_C, ");
sb.append(" s.count c5, ");
sb.append(" f.count c6, ");
sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
sb.append(" from T_DOCUMENT d ");
sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
" FROM T_SHARE s, T_ACL ac " +
" WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
" left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
" FROM T_FILE f " +
" WHERE f.FIL_DELETEDATE_D is null group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C ");
sb.append(" left join (select rs.*, rs3.idDocument " +
"from T_ROUTE_STEP rs " +
"join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
"where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
// Add search criterias
if (criteria.getTargetIdList() != null) {
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
}
parameterMap.put("targetIdList", criteria.getTargetIdList());
if (!SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
// Read permission is enough for searching
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
}
if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSearch(), criteria.getFullSearch());
parameterMap.put("targetIdList", criteria.getTargetIdList());
if (!Strings.isNullOrEmpty(criteria.getSimpleSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
documentSearchMap = search(criteria.getSimpleSearch(), criteria.getFullSearch());
if (documentSearchMap.isEmpty()) {
// If the search doesn't find any document, the request should return nothing
documentSearchMap.put(UUID.randomUUID().toString(), null);
@ -312,7 +320,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
}
}
if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
if (!criteria.getExcludedTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
@ -367,8 +375,6 @@ public class LuceneIndexingHandler implements IndexingHandler {
documentDto.setFileId((String) o[i++]);
Number shareCount = (Number) o[i++];
documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
Number fileCount = (Number) o[i++];
documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
documentDto.setActiveRoute(o[i++] != null);
documentDto.setCurrentStepName((String) o[i++]);
documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
@ -406,14 +412,14 @@ public class LuceneIndexingHandler implements IndexingHandler {
/**
* Fulltext search in files and documents.
*
* @param searchQuery Search query on metadatas
* @param simpleSearchQuery Search query on metadatas
* @param fullSearchQuery Search query on all fields
* @return Map of document IDs as key and highlight as value
* @throws Exception e
*/
private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception {
private Map<String, String> search(String simpleSearchQuery, String fullSearchQuery) throws Exception {
// The fulltext query searches in all fields
searchQuery = searchQuery + " " + fullSearchQuery;
String searchQuery = simpleSearchQuery + " " + fullSearchQuery;
// Build search query
Analyzer analyzer = new StandardAnalyzer();

View File

@ -68,7 +68,7 @@ public class PaginatedLists {
}
/**
* Executes a query and returns the data of the currunt page.
* Executes a query and returns the data of the current page.
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
@ -82,18 +82,6 @@ public class PaginatedLists {
q.setMaxResults(paginatedList.getLimit());
return q.getResultList();
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).
*
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters
* @return List of results
*/
public static <E> List<Object[]> executePaginatedQuery(PaginatedList<E> paginatedList, QueryParam queryParam) {
executeCountQuery(paginatedList, queryParam);
return executeResultQuery(paginatedList, queryParam);
}
/**
* Executes a paginated request with 2 native queries (one to count the number of results, and one to return the page).

View File

@ -26,7 +26,7 @@ import java.util.Properties;
public final class EMF {
private static final Logger log = LoggerFactory.getLogger(EMF.class);
private static Map<Object, Object> properties;
private static Properties properties;
private static EntityManagerFactory emfInstance;
@ -59,7 +59,7 @@ public final class EMF {
}
}
private static Map<Object, Object> getEntityManagerProperties() {
private static Properties getEntityManagerProperties() {
// Use properties file if exists
try {
URL hibernatePropertiesUrl = EMF.class.getResource("/hibernate.properties");
@ -79,9 +79,13 @@ public final class EMF {
String databaseUrl = System.getenv("DATABASE_URL");
String databaseUsername = System.getenv("DATABASE_USER");
String databasePassword = System.getenv("DATABASE_PASSWORD");
String databasePoolSize = System.getenv("DATABASE_POOL_SIZE");
if(databasePoolSize == null) {
databasePoolSize = "10";
}
log.info("Configuring EntityManager from environment parameters");
Map<Object, Object> props = new HashMap<>();
Properties props = new Properties();
Path dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
if (Strings.isNullOrEmpty(databaseUrl)) {
@ -92,7 +96,7 @@ public final class EMF {
props.put("hibernate.connection.username", "sa");
} else {
props.put("hibernate.connection.driver_class", "org.postgresql.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQL94Dialect");
props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
props.put("hibernate.connection.url", databaseUrl);
props.put("hibernate.connection.username", databaseUsername);
props.put("hibernate.connection.password", databasePassword);
@ -103,7 +107,7 @@ public final class EMF {
props.put("hibernate.max_fetch_depth", "5");
props.put("hibernate.cache.use_second_level_cache", "false");
props.put("hibernate.connection.initial_pool_size", "1");
props.put("hibernate.connection.pool_size", "10");
props.put("hibernate.connection.pool_size", databasePoolSize);
props.put("hibernate.connection.pool_validation_interval", "5");
return props;
}
@ -136,4 +140,4 @@ public final class EMF {
public static String getDriver() {
return (String) properties.get("hibernate.connection.driver_class");
}
}
}

View File

@ -1 +1 @@
db.version=28
db.version=30

View File

@ -0,0 +1,2 @@
-- Add a per-file size column; default -1 (File.UNKNOWN_SIZE) marks existing rows
-- whose size is not yet known and will be back-filled by FileSizeService.
alter table T_FILE add column FIL_SIZE_N bigint not null default -1;
-- Bump the schema version so this migration is not applied twice.
update T_CONFIG set CFG_VALUE_C = '29' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,2 @@
-- Speed up lookups of a document's files (e.g. per-document file counts)
-- by indexing the document foreign key on T_FILE.
create index IDX_FIL_IDDOC_C ON T_FILE (FIL_IDDOC_C ASC);
-- Bump the schema version so this migration is not applied twice.
update T_CONFIG set CFG_VALUE_C = '30' where CFG_ID_C = 'DB_VERSION';

View File

@ -0,0 +1,49 @@
package com.sismics;
import java.io.InputStream;
import java.net.URL;
/**
 * Base class for tests: names of the fixture files bundled under the
 * {@code file/} test-resource directory, plus helpers to load them from
 * the system class loader.
 */
public abstract class BaseTest {
// Fixture file names, one per supported format under test.
protected static final String FILE_CSV = "document.csv";
protected static final String FILE_DOCX = "document.docx";
protected static final String FILE_GIF = "image.gif";
protected static final String FILE_JPG = "apollo_portrait.jpg";
// Expected byte size of FILE_JPG on disk, used by size-related assertions.
protected static final Long FILE_JPG_SIZE = 7_907L;
protected static final String FILE_JPG2 = "apollo_landscape.jpg";
protected static final String FILE_MP4 = "video.mp4";
protected static final String FILE_ODT = "document.odt";
protected static final String FILE_PDF = "udhr.pdf";
protected static final String FILE_PDF_ENCRYPTED = "udhr_encrypted.pdf";
protected static final String FILE_PDF_SCANNED = "scanned.pdf";
protected static final String FILE_PNG = "image.png";
protected static final String FILE_PPTX = "apache.pptx";
protected static final String FILE_TXT = "document.txt";
protected static final String FILE_WEBM = "video.webm";
protected static final String FILE_XLSX = "document.xlsx";
protected static final String FILE_ZIP = "document.zip";
/**
 * Returns the URL of a fixture file, or null if it is not on the classpath.
 *
 * @param fileName fixture file name (one of the FILE_* constants)
 * @return resource URL or null
 */
protected static URL getResource(String fileName) {
return ClassLoader.getSystemResource("file/" + fileName);
}
/**
 * Opens a fixture file as a stream, or returns null if it is not on the classpath.
 * Callers are responsible for closing the returned stream.
 *
 * @param fileName fixture file name (one of the FILE_* constants)
 * @return resource stream or null
 */
protected static InputStream getSystemResourceAsStream(String fileName) {
return ClassLoader.getSystemResourceAsStream("file/" + fileName);
}
}

View File

@ -1,21 +1,36 @@
package com.sismics.docs;
import com.sismics.BaseTest;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.EncryptionUtil;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.jpa.EMF;
import com.sismics.util.mime.MimeType;
import org.junit.After;
import org.junit.Before;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
/**
* Base class of tests with a transactional context.
*
* @author jtremeaux
*/
public abstract class BaseTransactionalTest {
public abstract class BaseTransactionalTest extends BaseTest {
@Before
public void setUp() throws Exception {
public void setUp() {
// Initialize the entity manager
EntityManager em = EMF.get().createEntityManager();
ThreadLocalContext context = ThreadLocalContext.get();
@ -25,6 +40,35 @@ public abstract class BaseTransactionalTest {
}
@After
public void tearDown() throws Exception {
public void tearDown() {
ThreadLocalContext.get().getEntityManager().getTransaction().rollback();
}
protected User createUser(String userName) throws Exception {
UserDao userDao = new UserDao();
User user = new User();
user.setUsername(userName);
user.setPassword("12345678");
user.setEmail("toto@docs.com");
user.setRoleId("admin");
user.setStorageQuota(100_000L);
userDao.create(user, userName);
return user;
}
protected File createFile(User user, long fileSize) throws Exception {
FileDao fileDao = new FileDao();
try(InputStream inputStream = getSystemResourceAsStream(FILE_JPG)) {
File file = new File();
file.setId("apollo_portrait");
file.setUserId(user.getId());
file.setVersion(0);
file.setMimeType(MimeType.IMAGE_JPEG);
file.setSize(fileSize);
String fileId = fileDao.create(file, user.getId());
Cipher cipher = EncryptionUtil.getEncryptionCipher(user.getPrivateKey());
Files.copy(new CipherInputStream(inputStream, cipher), DirectoryUtil.getStorageDirectory().resolve(fileId), REPLACE_EXISTING);
return file;
}
}
}

View File

@ -18,22 +18,16 @@ public class TestJpa extends BaseTransactionalTest {
public void testJpa() throws Exception {
// Create a user
UserDao userDao = new UserDao();
User user = new User();
user.setUsername("username");
user.setPassword("12345678");
user.setEmail("toto@docs.com");
user.setRoleId("admin");
user.setStorageQuota(10L);
String id = userDao.create(user, "me");
User user = createUser("testJpa");
TransactionUtil.commit();
// Search a user by his ID
user = userDao.getById(id);
user = userDao.getById(user.getId());
Assert.assertNotNull(user);
Assert.assertEquals("toto@docs.com", user.getEmail());
// Authenticate using the database
Assert.assertNotNull(new InternalAuthenticationHandler().authenticate("username", "12345678"));
Assert.assertNotNull(new InternalAuthenticationHandler().authenticate("testJpa", "12345678"));
}
}

View File

@ -0,0 +1,52 @@
package com.sismics.docs.core.listener.async;
import com.sismics.docs.BaseTransactionalTest;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.TransactionUtil;
import org.junit.Assert;
import org.junit.Test;
public class FileDeletedAsyncListenerTest extends BaseTransactionalTest {
@Test
public void updateQuotaSizeKnown() throws Exception {
User user = createUser("updateQuotaSizeKnown");
File file = createFile(user, FILE_JPG_SIZE);
UserDao userDao = new UserDao();
user = userDao.getById(user.getId());
user.setStorageCurrent(10_000L);
userDao.updateQuota(user);
FileDeletedAsyncListener fileDeletedAsyncListener = new FileDeletedAsyncListener();
TransactionUtil.commit();
FileDeletedAsyncEvent event = new FileDeletedAsyncEvent();
event.setFileSize(FILE_JPG_SIZE);
event.setFileId(file.getId());
event.setUserId(user.getId());
fileDeletedAsyncListener.on(event);
Assert.assertEquals(userDao.getById(user.getId()).getStorageCurrent(), Long.valueOf(10_000 - FILE_JPG_SIZE));
}
@Test
public void updateQuotaSizeUnknown() throws Exception {
User user = createUser("updateQuotaSizeUnknown");
File file = createFile(user, File.UNKNOWN_SIZE);
UserDao userDao = new UserDao();
user = userDao.getById(user.getId());
user.setStorageCurrent(10_000L);
userDao.updateQuota(user);
FileDeletedAsyncListener fileDeletedAsyncListener = new FileDeletedAsyncListener();
TransactionUtil.commit();
FileDeletedAsyncEvent event = new FileDeletedAsyncEvent();
event.setFileSize(FILE_JPG_SIZE);
event.setFileId(file.getId());
event.setUserId(user.getId());
fileDeletedAsyncListener.on(event);
Assert.assertEquals(userDao.getById(user.getId()).getStorageCurrent(), Long.valueOf(10_000 - FILE_JPG_SIZE));
}
}

View File

@ -0,0 +1,22 @@
package com.sismics.docs.core.service;
import com.sismics.docs.BaseTransactionalTest;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import org.junit.Assert;
import org.junit.Test;
public class TestFileSizeService extends BaseTransactionalTest {
@Test
public void processFileTest() throws Exception {
User user = createUser("processFileTest");
FileDao fileDao = new FileDao();
File file = createFile(user, File.UNKNOWN_SIZE);
FileSizeService fileSizeService = new FileSizeService();
fileSizeService.processFile(file);
Assert.assertEquals(fileDao.getFile(file.getId()).getSize(), Long.valueOf(FILE_JPG_SIZE));
}
}

View File

@ -2,6 +2,7 @@ package com.sismics.docs.core.util;
import com.google.common.base.Strings;
import com.google.common.io.ByteStreams;
import com.sismics.BaseTest;
import org.junit.Assert;
import org.junit.Test;
@ -14,7 +15,7 @@ import java.io.InputStream;
*
* @author bgamard
*/
public class TestEncryptUtil {
public class TestEncryptUtil extends BaseTest {
@Test
public void generatePrivateKeyTest() {
String key = EncryptionUtil.generatePrivateKey();
@ -31,9 +32,9 @@ public class TestEncryptUtil {
// NOP
}
Cipher cipher = EncryptionUtil.getEncryptionCipher("OnceUponATime");
InputStream inputStream = new CipherInputStream(this.getClass().getResourceAsStream("/file/udhr.pdf"), cipher);
InputStream inputStream = new CipherInputStream(getSystemResourceAsStream(FILE_PDF), cipher);
byte[] encryptedData = ByteStreams.toByteArray(inputStream);
byte[] assertData = ByteStreams.toByteArray(this.getClass().getResourceAsStream("/file/udhr_encrypted.pdf"));
byte[] assertData = ByteStreams.toByteArray(getSystemResourceAsStream(FILE_PDF_ENCRYPTED));
Assert.assertEquals(encryptedData.length, assertData.length);
}
@ -41,9 +42,9 @@ public class TestEncryptUtil {
@Test
public void decryptStreamTest() throws Exception {
InputStream inputStream = EncryptionUtil.decryptInputStream(
this.getClass().getResourceAsStream("/file/udhr_encrypted.pdf"), "OnceUponATime");
getSystemResourceAsStream(FILE_PDF_ENCRYPTED), "OnceUponATime");
byte[] encryptedData = ByteStreams.toByteArray(inputStream);
byte[] assertData = ByteStreams.toByteArray(this.getClass().getResourceAsStream("/file/udhr.pdf"));
byte[] assertData = ByteStreams.toByteArray(getSystemResourceAsStream(FILE_PDF));
Assert.assertEquals(encryptedData.length, assertData.length);
}

View File

@ -2,6 +2,7 @@ package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import com.sismics.BaseTest;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.format.*;
@ -23,11 +24,11 @@ import java.util.Date;
*
* @author bgamard
*/
public class TestFileUtil {
public class TestFileUtil extends BaseTest {
@Test
public void extractContentOpenDocumentTextTest() throws Exception {
Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, "document.odt"));
Path path = Paths.get(getResource(FILE_ODT).toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, FILE_ODT));
Assert.assertNotNull(formatHandler);
Assert.assertTrue(formatHandler instanceof OdtFormatHandler);
String content = formatHandler.extractContent("eng", path);
@ -36,8 +37,8 @@ public class TestFileUtil {
@Test
public void extractContentOfficeDocumentTest() throws Exception {
Path path = Paths.get(ClassLoader.getSystemResource("file/document.docx").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, "document.docx"));
Path path = Paths.get(getResource(FILE_DOCX).toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, FILE_DOCX));
Assert.assertNotNull(formatHandler);
Assert.assertTrue(formatHandler instanceof DocxFormatHandler);
String content = formatHandler.extractContent("eng", path);
@ -46,8 +47,8 @@ public class TestFileUtil {
@Test
public void extractContentPowerpointTest() throws Exception {
Path path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, "apache.pptx"));
Path path = Paths.get(getResource(FILE_PPTX).toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, FILE_PPTX));
Assert.assertNotNull(formatHandler);
Assert.assertTrue(formatHandler instanceof PptxFormatHandler);
String content = formatHandler.extractContent("eng", path);
@ -56,8 +57,8 @@ public class TestFileUtil {
@Test
public void extractContentPdf() throws Exception {
Path path = Paths.get(ClassLoader.getSystemResource("file/udhr.pdf").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, "udhr.pdf"));
Path path = Paths.get(getResource(FILE_PDF).toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, FILE_PDF));
Assert.assertNotNull(formatHandler);
Assert.assertTrue(formatHandler instanceof PdfFormatHandler);
String content = formatHandler.extractContent("eng", path);
@ -66,8 +67,8 @@ public class TestFileUtil {
@Test
public void extractContentScannedPdf() throws Exception {
Path path = Paths.get(ClassLoader.getSystemResource("file/scanned.pdf").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, "scanned.pdf"));
Path path = Paths.get(getResource("scanned.pdf").toURI());
FormatHandler formatHandler = FormatHandlerUtil.find(MimeTypeUtil.guessMimeType(path, FILE_PDF_SCANNED));
Assert.assertNotNull(formatHandler);
Assert.assertTrue(formatHandler instanceof PdfFormatHandler);
String content = formatHandler.extractContent("eng", path);
@ -76,12 +77,12 @@ public class TestFileUtil {
@Test
public void convertToPdfTest() throws Exception {
try (InputStream inputStream0 = Resources.getResource("file/apollo_landscape.jpg").openStream();
InputStream inputStream1 = Resources.getResource("file/apollo_portrait.jpg").openStream();
InputStream inputStream2 = Resources.getResource("file/udhr_encrypted.pdf").openStream();
InputStream inputStream3 = Resources.getResource("file/document.docx").openStream();
InputStream inputStream4 = Resources.getResource("file/document.odt").openStream();
InputStream inputStream5 = Resources.getResource("file/apache.pptx").openStream()) {
try (InputStream inputStream0 = getSystemResourceAsStream(FILE_JPG2);
InputStream inputStream1 = getSystemResourceAsStream(FILE_JPG);
InputStream inputStream2 = getSystemResourceAsStream(FILE_PDF_ENCRYPTED);
InputStream inputStream3 = getSystemResourceAsStream(FILE_DOCX);
InputStream inputStream4 = getSystemResourceAsStream(FILE_ODT);
InputStream inputStream5 = getSystemResourceAsStream(FILE_PPTX)) {
// Document
DocumentDto documentDto = new DocumentDto();
documentDto.setTitle("My super document 1");

View File

@ -1,5 +1,6 @@
package com.sismics.util;
import com.sismics.BaseTest;
import com.sismics.util.mime.MimeType;
import com.sismics.util.mime.MimeTypeUtil;
import org.junit.Assert;
@ -13,59 +14,59 @@ import java.nio.file.Paths;
*
* @author bgamard
*/
public class TestMimeTypeUtil {
public class TestMimeTypeUtil extends BaseTest {
@Test
public void test() throws Exception {
// Detect ODT files
Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI());
Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, "document.odt"));
Path path = Paths.get(getResource(FILE_ODT).toURI());
Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, FILE_ODT));
// Detect DOCX files
path = Paths.get(ClassLoader.getSystemResource("file/document.docx").toURI());
Assert.assertEquals(MimeType.OFFICE_DOCUMENT, MimeTypeUtil.guessMimeType(path, "document.odt"));
path = Paths.get(getResource(FILE_DOCX).toURI());
Assert.assertEquals(MimeType.OFFICE_DOCUMENT, MimeTypeUtil.guessMimeType(path, FILE_ODT));
// Detect PPTX files
path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI());
Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, "apache.pptx"));
path = Paths.get(getResource(FILE_PPTX).toURI());
Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, FILE_PPTX));
// Detect XLSX files
path = Paths.get(ClassLoader.getSystemResource("file/document.xlsx").toURI());
Assert.assertEquals(MimeType.OFFICE_SHEET, MimeTypeUtil.guessMimeType(path, "document.xlsx"));
path = Paths.get(getResource(FILE_XLSX).toURI());
Assert.assertEquals(MimeType.OFFICE_SHEET, MimeTypeUtil.guessMimeType(path, FILE_XLSX));
// Detect TXT files
path = Paths.get(ClassLoader.getSystemResource("file/document.txt").toURI());
Assert.assertEquals(MimeType.TEXT_PLAIN, MimeTypeUtil.guessMimeType(path, "document.txt"));
path = Paths.get(getResource(FILE_TXT).toURI());
Assert.assertEquals(MimeType.TEXT_PLAIN, MimeTypeUtil.guessMimeType(path, FILE_TXT));
// Detect CSV files
path = Paths.get(ClassLoader.getSystemResource("file/document.csv").toURI());
Assert.assertEquals(MimeType.TEXT_CSV, MimeTypeUtil.guessMimeType(path, "document.csv"));
path = Paths.get(getResource(FILE_CSV).toURI());
Assert.assertEquals(MimeType.TEXT_CSV, MimeTypeUtil.guessMimeType(path, FILE_CSV));
// Detect PDF files
path = Paths.get(ClassLoader.getSystemResource("file/udhr.pdf").toURI());
Assert.assertEquals(MimeType.APPLICATION_PDF, MimeTypeUtil.guessMimeType(path, "udhr.pdf"));
path = Paths.get(getResource(FILE_PDF).toURI());
Assert.assertEquals(MimeType.APPLICATION_PDF, MimeTypeUtil.guessMimeType(path, FILE_PDF));
// Detect JPEG files
path = Paths.get(ClassLoader.getSystemResource("file/apollo_portrait.jpg").toURI());
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(path, "apollo_portrait.jpg"));
path = Paths.get(getResource(FILE_JPG).toURI());
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(path, FILE_JPG));
// Detect GIF files
path = Paths.get(ClassLoader.getSystemResource("file/image.gif").toURI());
Assert.assertEquals(MimeType.IMAGE_GIF, MimeTypeUtil.guessMimeType(path, "image.gif"));
path = Paths.get(getResource(FILE_GIF).toURI());
Assert.assertEquals(MimeType.IMAGE_GIF, MimeTypeUtil.guessMimeType(path, FILE_GIF));
// Detect PNG files
path = Paths.get(ClassLoader.getSystemResource("file/image.png").toURI());
Assert.assertEquals(MimeType.IMAGE_PNG, MimeTypeUtil.guessMimeType(path, "image.png"));
path = Paths.get(getResource(FILE_PNG).toURI());
Assert.assertEquals(MimeType.IMAGE_PNG, MimeTypeUtil.guessMimeType(path, FILE_PNG));
// Detect ZIP files
path = Paths.get(ClassLoader.getSystemResource("file/document.zip").toURI());
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(path, "document.zip"));
path = Paths.get(getResource(FILE_ZIP).toURI());
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(path, FILE_ZIP));
// Detect WEBM files
path = Paths.get(ClassLoader.getSystemResource("file/video.webm").toURI());
Assert.assertEquals(MimeType.VIDEO_WEBM, MimeTypeUtil.guessMimeType(path, "video.webm"));
path = Paths.get(getResource(FILE_WEBM).toURI());
Assert.assertEquals(MimeType.VIDEO_WEBM, MimeTypeUtil.guessMimeType(path, FILE_WEBM));
// Detect MP4 files
path = Paths.get(ClassLoader.getSystemResource("file/video.mp4").toURI());
Assert.assertEquals(MimeType.VIDEO_MP4, MimeTypeUtil.guessMimeType(path, "video.mp4"));
path = Paths.get(getResource(FILE_MP4).toURI());
Assert.assertEquals(MimeType.VIDEO_MP4, MimeTypeUtil.guessMimeType(path, FILE_MP4));
}
}

View File

@ -1,5 +1,6 @@
package com.sismics.util.format;
import com.sismics.BaseTest;
import com.sismics.docs.core.util.format.PdfFormatHandler;
import org.junit.Assert;
import org.junit.Test;
@ -11,14 +12,14 @@ import java.nio.file.Paths;
*
* @author bgamard
*/
public class TestPdfFormatHandler {
public class TestPdfFormatHandler extends BaseTest {
/**
* Test related to https://github.com/sismics/docs/issues/373.
*/
@Test
public void testIssue373() throws Exception {
PdfFormatHandler formatHandler = new PdfFormatHandler();
String content = formatHandler.extractContent("deu", Paths.get(ClassLoader.getSystemResource("file/issue373.pdf").toURI()));
String content = formatHandler.extractContent("deu", Paths.get(getResource("issue373.pdf").toURI()));
Assert.assertTrue(content.contains("Aufrechterhaltung"));
Assert.assertTrue(content.contains("Außentemperatur"));
Assert.assertTrue(content.contains("Grundumsatzmessungen"));

View File

@ -8,6 +8,7 @@ import com.sismics.util.JsonUtil;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import java.io.IOException;
import java.nio.file.Files;
@ -18,12 +19,15 @@ import java.nio.file.Files;
*/
public class RestUtil {
/**
* Transform a File into its JSON representation
* Transform a File into its JSON representation.
* If the file size it is not stored in the database the size can be wrong
* because the encrypted file size is used.
* @param fileDb a file
* @return the JSON
*/
public static JsonObjectBuilder fileToJsonObjectBuilder(File fileDb) {
try {
long fileSize = fileDb.getSize().equals(File.UNKNOWN_SIZE) ? Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId())) : fileDb.getSize();
return Json.createObjectBuilder()
.add("id", fileDb.getId())
.add("processing", FileUtil.isProcessingFile(fileDb.getId()))
@ -32,7 +36,7 @@ public class RestUtil {
.add("mimetype", fileDb.getMimeType())
.add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
.add("create_date", fileDb.getCreateDate().getTime())
.add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId())));
.add("size", fileSize);
} catch (IOException e) {
throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
}

View File

@ -21,7 +21,7 @@ public class ValidationUtil {
private static Pattern ALPHANUMERIC_PATTERN = Pattern.compile("[a-zA-Z0-9_]+");
private static Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");
private static Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@.-]+");
/**
* Checks that the argument is not null.

View File

@ -25,6 +25,9 @@ import jakarta.ws.rs.core.UriBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
@ -39,7 +42,9 @@ public abstract class BaseJerseyTest extends JerseyTest {
protected static final String FILE_DOCUMENT_ODT = "file/document.odt";
protected static final String FILE_DOCUMENT_TXT = "file/document.txt";
protected static final String FILE_EINSTEIN_ROOSEVELT_LETTER_PNG = "file/Einstein-Roosevelt-letter.png";
protected static final long FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE = 292641L;
protected static final String FILE_PIA_00452_JPG = "file/PIA00452.jpg";
protected static final long FILE_PIA_00452_JPG_SIZE = 163510L;
protected static final String FILE_VIDEO_WEBM = "file/video.webm";
protected static final String FILE_WIKIPEDIA_PDF = "file/wikipedia.pdf";
protected static final String FILE_WIKIPEDIA_ZIP = "file/wikipedia.zip";
@ -58,7 +63,11 @@ public abstract class BaseJerseyTest extends JerseyTest {
* Test mail server.
*/
private Wiser wiser;
public String adminToken() {
return clientUtil.login("admin", "admin", false);
}
@Override
protected TestContainerFactory getTestContainerFactory() throws TestContainerException {
return new ExternalTestContainerFactory();

View File

@ -129,6 +129,12 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-all</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@ -182,7 +188,7 @@
</systemProperties>
<webApp>
<contextPath>/docs-web</contextPath>
<overrideDescriptor>src/dev/main/webapp/web-override.xml</overrideDescriptor>
<overrideDescriptor>${project.basedir}/src/dev/main/webapp/web-override.xml</overrideDescriptor>
</webApp>
</configuration>
</plugin>
@ -260,8 +266,8 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<warSourceDirectory>${basedir}/src/main/webapp/dist</warSourceDirectory>
<webXml>src\main\webapp\WEB-INF\web.xml</webXml>
<warSourceDirectory>${project.basedir}/src/main/webapp/dist</warSourceDirectory>
<webXml>src/main/webapp/WEB-INF/web.xml</webXml>
</configuration>
</plugin>
</plugins>

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=28
db.version=30

View File

@ -1,16 +1,26 @@
package com.sismics.docs.rest.resource;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.*;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.RelationDao;
import com.sismics.docs.core.dao.RouteStepDao;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.*;
import com.sismics.docs.core.dao.dto.AclDto;
import com.sismics.docs.core.dao.dto.ContributorDto;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.RelationDto;
import com.sismics.docs.core.dao.dto.RouteStepDto;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.event.DocumentDeletedAsyncEvent;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
@ -19,10 +29,15 @@ import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.*;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DocumentUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.MetadataUtil;
import com.sismics.docs.core.util.PdfUtil;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.docs.rest.util.DocumentSearchCriteriaUtil;
import com.sismics.rest.exception.ClientException;
import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.exception.ServerException;
@ -33,55 +48,55 @@ import com.sismics.util.EmailUtil;
import com.sismics.util.JsonUtil;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.mime.MimeType;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.DELETE;
import jakarta.ws.rs.FormParam;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.HEAD;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.PUT;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.StreamingOutput;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.internet.MimeMessage;
import jakarta.ws.rs.*;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.text.MessageFormat;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
/**
* Document REST resources.
*
*
* @author bgamard
*/
@Path("/document")
public class DocumentResource extends BaseResource {
protected static final DateTimeParser YEAR_PARSER = DateTimeFormat.forPattern("yyyy").getParser();
protected static final DateTimeParser MONTH_PARSER = DateTimeFormat.forPattern("yyyy-MM").getParser();
protected static final DateTimeParser DAY_PARSER = DateTimeFormat.forPattern("yyyy-MM-dd").getParser();
private static final DateTimeFormatter DAY_FORMATTER = new DateTimeFormatter(null, DAY_PARSER);
private static final DateTimeFormatter MONTH_FORMATTER = new DateTimeFormatter(null, MONTH_PARSER);
private static final DateTimeFormatter YEAR_FORMATTER = new DateTimeFormatter(null, YEAR_PARSER);
private static final DateTimeParser[] DATE_PARSERS = new DateTimeParser[]{
YEAR_PARSER,
MONTH_PARSER,
DAY_PARSER};
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder().append( null, DATE_PARSERS).toFormatter();
/**
* Returns a document.
*
@ -89,8 +104,8 @@ public class DocumentResource extends BaseResource {
* @apiName GetDocument
* @apiGroup Document
* @apiParam {String} id Document ID
* @apiParam {String} share Share ID
* @apiParam {Booleans} files If true includes files information
* @apiParam {String} [share] Share ID
* @apiParam {Boolean} [files] If true includes files information
* @apiSuccess {String} id ID
* @apiSuccess {String} title Title
* @apiSuccess {String} description Description
@ -112,6 +127,7 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} coverage Coverage
* @apiSuccess {String} rights Rights
* @apiSuccess {String} creator Username of the creator
* @apiSuccess {String} file_id Main file ID
* @apiSuccess {Boolean} writable True if the document is writable by the current user
* @apiSuccess {Object[]} acls List of ACL
* @apiSuccess {String} acls.id ID
@ -163,22 +179,24 @@ public class DocumentResource extends BaseResource {
@QueryParam("share") String shareId,
@QueryParam("files") Boolean files) {
authenticate();
DocumentDao documentDao = new DocumentDao();
DocumentDto documentDto = documentDao.getDocument(documentId, PermType.READ, getTargetIdList(shareId));
if (documentDto == null) {
throw new NotFoundException();
}
JsonObjectBuilder document = Json.createObjectBuilder()
.add("id", documentDto.getId())
.add("title", documentDto.getTitle())
.add("description", JsonUtil.nullable(documentDto.getDescription()))
.add("create_date", documentDto.getCreateTimestamp())
.add("update_date", documentDto.getUpdateTimestamp())
.add("language", documentDto.getLanguage())
.add("shared", documentDto.getShared())
.add("file_count", documentDto.getFileCount());
JsonObjectBuilder document = createDocumentObjectBuilder(documentDto)
.add("creator", documentDto.getCreator())
.add("coverage", JsonUtil.nullable(documentDto.getCoverage()))
.add("file_count", documentDto.getFileCount())
.add("format", JsonUtil.nullable(documentDto.getFormat()))
.add("identifier", JsonUtil.nullable(documentDto.getIdentifier()))
.add("publisher", JsonUtil.nullable(documentDto.getPublisher()))
.add("rights", JsonUtil.nullable(documentDto.getRights()))
.add("source", JsonUtil.nullable(documentDto.getSource()))
.add("subject", JsonUtil.nullable(documentDto.getSubject()))
.add("type", JsonUtil.nullable(documentDto.getType()));
List<TagDto> tagDtoList = null;
if (principal.isAnonymous()) {
@ -192,26 +210,8 @@ public class DocumentResource extends BaseResource {
.setTargetIdList(getTargetIdList(null)) // No tags for shares
.setDocumentId(documentId),
new SortCriteria(1, true));
JsonArrayBuilder tags = Json.createArrayBuilder();
for (TagDto tagDto : tagDtoList) {
tags.add(Json.createObjectBuilder()
.add("id", tagDto.getId())
.add("name", tagDto.getName())
.add("color", tagDto.getColor()));
}
document.add("tags", tags);
document.add("tags", createTagsArrayBuilder(tagDtoList));
}
// Below is specific to GET /document/id
document.add("subject", JsonUtil.nullable(documentDto.getSubject()));
document.add("identifier", JsonUtil.nullable(documentDto.getIdentifier()));
document.add("publisher", JsonUtil.nullable(documentDto.getPublisher()));
document.add("format", JsonUtil.nullable(documentDto.getFormat()));
document.add("source", JsonUtil.nullable(documentDto.getSource()));
document.add("type", JsonUtil.nullable(documentDto.getType()));
document.add("coverage", JsonUtil.nullable(documentDto.getCoverage()));
document.add("rights", JsonUtil.nullable(documentDto.getRights()));
document.add("creator", documentDto.getCreator());
// Add ACL
AclUtil.addAcls(document, documentId, getTargetIdList(shareId));
@ -235,7 +235,7 @@ public class DocumentResource extends BaseResource {
}
document.add("inherited_acls", aclList);
}
// Add contributors
ContributorDao contributorDao = new ContributorDao();
List<ContributorDto> contributorDtoList = contributorDao.getByDocumentId(documentId);
@ -246,7 +246,7 @@ public class DocumentResource extends BaseResource {
.add("email", contributorDto.getEmail()));
}
document.add("contributors", contributorList);
// Add relations
RelationDao relationDao = new RelationDao();
List<RelationDto> relationDtoList = relationDao.getByDocumentId(documentId);
@ -285,7 +285,7 @@ public class DocumentResource extends BaseResource {
return Response.ok().entity(document.build()).build();
}
/**
* Export a document to PDF.
*
@ -295,7 +295,6 @@ public class DocumentResource extends BaseResource {
* @apiParam {String} id Document ID
* @apiParam {String} share Share ID
* @apiParam {Boolean} metadata If true, export metadata
* @apiParam {Boolean} comments If true, export comments
* @apiParam {Boolean} fitimagetopage If true, fit the images to pages
* @apiParam {Number} margin Margin around the pages, in millimeter
* @apiSuccess {String} pdf The whole response is the PDF file
@ -307,7 +306,6 @@ public class DocumentResource extends BaseResource {
* @param documentId Document ID
* @param shareId Share ID
* @param metadata Export metadata
* @param comments Export comments
* @param fitImageToPage Fit images to page
* @param marginStr Margins
* @return Response
@ -318,21 +316,20 @@ public class DocumentResource extends BaseResource {
@PathParam("id") String documentId,
@QueryParam("share") String shareId,
final @QueryParam("metadata") Boolean metadata,
final @QueryParam("comments") Boolean comments,
final @QueryParam("fitimagetopage") Boolean fitImageToPage,
@QueryParam("margin") String marginStr) {
authenticate();
// Validate input
final int margin = ValidationUtil.validateInteger(marginStr, "margin");
// Get document and check read permission
DocumentDao documentDao = new DocumentDao();
final DocumentDto documentDto = documentDao.getDocument(documentId, PermType.READ, getTargetIdList(shareId));
if (documentDto == null) {
throw new NotFoundException();
}
// Get files
FileDao fileDao = new FileDao();
UserDao userDao = new UserDao();
@ -343,7 +340,7 @@ public class DocumentResource extends BaseResource {
User user = userDao.getById(file.getUserId());
file.setPrivateKey(user.getPrivateKey());
}
// Convert to PDF
StreamingOutput stream = outputStream -> {
try {
@ -358,19 +355,36 @@ public class DocumentResource extends BaseResource {
.header("Content-Disposition", "inline; filename=\"" + documentDto.getTitle() + ".pdf\"")
.build();
}
/**
* Returns all documents.
* Returns all documents. If a parameter is considered invalid, the search result will be empty.
*
* @api {get} /document/list Get documents
* @apiName GetDocumentList
* @apiGroup Document
* @apiParam {String} limit Total number of documents to return
* @apiParam {String} offset Start at this index
* @apiParam {Number} sort_column Column index to sort on
* @apiParam {Boolean} asc If true, sort in ascending order
* @apiParam {String} search Search query (see "Document search syntax" on the top of the page for explanations)
* @apiParam {Booleans} files If true includes files information
*
* @apiParam {String} [limit] Total number of documents to return (default is <code>10</code>)
* @apiParam {String} [offset] Start at this index (default is <code>0</code>)
* @apiParam {Number} [sort_column] Column index to sort on
* @apiParam {Boolean} [asc] If <code>true</code> sorts in ascending order
* @apiParam {String} [search] Search query (see "Document search syntax" on the top of the page for explanations) when the input is entered by a human.
* @apiParam {Boolean} [files] If <code>true</code> includes files information
*
* @apiParam {String} [search[after]] The document must have been created after or at the value moment, accepted format is <code>yyyy-MM-dd</code>
* @apiParam {String} [search[before]] The document must have been created before or at the value moment, accepted format is <code>yyyy-MM-dd</code>
* @apiParam {String} [search[by]] The document must have been created by the specified creator's username with an exact match, the user must not be deleted
* @apiParam {String} [search[full]] Used as a search criteria for all fields including the document's files content, several comma-separated values can be specified and the document must match any of them
* @apiParam {String} [search[lang]] The document must be of the specified language (example: <code>en</code>)
* @apiParam {String} [search[mime]] The document must be of the specified mime type (example: <code>image/png</code>)
* @apiParam {String} [search[simple]] Used as a search criteria for all fields except the document's files content, several comma-separated values can be specified and the document must match any of them
* @apiParam {Boolean} [search[shared]] If <code>true</code> the document must be shared, else it is ignored
* @apiParam {String} [search[tag]] The document must contain a tag or a child of a tag that starts with the value, case is ignored, several comma-separated values can be specified and the document must match all tag filters
* @apiParam {String} [search[nottag]] The document must not contain a tag or a child of a tag that starts with the value, case is ignored, several comma-separated values can be specified and the document must match all tag filters
* @apiParam {String} [search[title]] The document's title must be the value, several comma-separated values can be specified and the document must match any of the titles
* @apiParam {String} [search[uafter]] The document must have been updated after or at the value moment, accepted format is <code>yyyy-MM-dd</code>
* @apiParam {String} [search[ubefore]] The document must have been updated before or at the value moment, accepted format is <code>yyyy-MM-dd</code>
* @apiParam {String} [search[workflow]] If the value is <code>me</code> the document must have an active route, for other values the criteria is ignored
*
* @apiSuccess {Number} total Total number of documents
* @apiSuccess {Object[]} documents List of documents
* @apiSuccess {String} documents.id ID
@ -396,6 +410,7 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} documents.files.mimetype MIME type
* @apiSuccess {String} documents.files.create_date Create date (timestamp)
* @apiSuccess {String[]} suggestions List of search suggestions
*
* @apiError (client) ForbiddenError Access denied
* @apiError (server) SearchError Error searching in documents
* @apiPermission user
@ -417,19 +432,56 @@ public class DocumentResource extends BaseResource {
@QueryParam("sort_column") Integer sortColumn,
@QueryParam("asc") Boolean asc,
@QueryParam("search") String search,
@QueryParam("files") Boolean files) {
@QueryParam("files") Boolean files,
@QueryParam("search[after]") String searchCreatedAfter,
@QueryParam("search[before]") String searchCreatedBefore,
@QueryParam("search[by]") String searchBy,
@QueryParam("search[full]") String searchFull,
@QueryParam("search[lang]") String searchLang,
@QueryParam("search[mime]") String searchMime,
@QueryParam("search[shared]") Boolean searchShared,
@QueryParam("search[simple]") String searchSimple,
@QueryParam("search[tag]") String searchTag,
@QueryParam("search[nottag]") String searchTagNot,
@QueryParam("search[title]") String searchTitle,
@QueryParam("search[uafter]") String searchUpdatedAfter,
@QueryParam("search[ubefore]") String searchUpdatedBefore,
@QueryParam("search[searchworkflow]") String searchWorkflow
) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
JsonObjectBuilder response = Json.createObjectBuilder();
JsonArrayBuilder documents = Json.createArrayBuilder();
TagDao tagDao = new TagDao();
PaginatedList<DocumentDto> paginatedList = PaginatedLists.create(limit, offset);
List<String> suggestionList = Lists.newArrayList();
SortCriteria sortCriteria = new SortCriteria(sortColumn, asc);
DocumentCriteria documentCriteria = parseSearchQuery(search);
List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria().setTargetIdList(getTargetIdList(null)), null);
DocumentCriteria documentCriteria = DocumentSearchCriteriaUtil.parseSearchQuery(search, allTagDtoList);
DocumentSearchCriteriaUtil.addHttpSearchParams(
documentCriteria,
searchBy,
searchCreatedAfter,
searchCreatedBefore,
searchFull,
searchLang,
searchMime,
searchShared,
searchSimple,
searchTag,
searchTagNot,
searchTitle,
searchUpdatedAfter,
searchUpdatedBefore,
searchWorkflow,
allTagDtoList);
documentCriteria.setTargetIdList(getTargetIdList(null));
try {
AppContext.getInstance().getIndexingHandler().findByCriteria(paginatedList, suggestionList, documentCriteria, sortCriteria);
@ -438,11 +490,14 @@ public class DocumentResource extends BaseResource {
}
// Find the files of the documents
Iterable<String> documentsIds = CollectionUtils.collect(paginatedList.getResultList(), DocumentDto::getId);
FileDao fileDao = new FileDao();
List<File> filesList = null;
Map<String, Long> filesCountByDocument = null;
if (Boolean.TRUE == files) {
Iterable<String> documentsIds = CollectionUtils.collect(paginatedList.getResultList(), DocumentDto::getId);
FileDao fileDao = new FileDao();
filesList = fileDao.getByDocumentsIds(documentsIds);
} else {
filesCountByDocument = fileDao.countByDocumentsIds(documentsIds);
}
for (DocumentDto documentDto : paginatedList.getResultList()) {
@ -450,32 +505,26 @@ public class DocumentResource extends BaseResource {
List<TagDto> tagDtoList = tagDao.findByCriteria(new TagCriteria()
.setTargetIdList(getTargetIdList(null))
.setDocumentId(documentDto.getId()), new SortCriteria(1, true));
JsonArrayBuilder tags = Json.createArrayBuilder();
for (TagDto tagDto : tagDtoList) {
tags.add(Json.createObjectBuilder()
.add("id", tagDto.getId())
.add("name", tagDto.getName())
.add("color", tagDto.getColor()));
Long filesCount;
Collection<File> filesOfDocument = null;
if (Boolean.TRUE == files) {
// Find files matching the document
filesOfDocument = CollectionUtils.select(filesList, file -> file.getDocumentId().equals(documentDto.getId()));
filesCount = (long) filesOfDocument.size();
} else {
filesCount = filesCountByDocument.getOrDefault(documentDto.getId(), 0L);
}
JsonObjectBuilder documentObjectBuilder = Json.createObjectBuilder()
.add("id", documentDto.getId())
.add("highlight", JsonUtil.nullable(documentDto.getHighlight()))
.add("file_id", JsonUtil.nullable(documentDto.getFileId()))
.add("title", documentDto.getTitle())
.add("description", JsonUtil.nullable(documentDto.getDescription()))
.add("create_date", documentDto.getCreateTimestamp())
.add("update_date", documentDto.getUpdateTimestamp())
.add("language", documentDto.getLanguage())
.add("shared", documentDto.getShared())
JsonObjectBuilder documentObjectBuilder = createDocumentObjectBuilder(documentDto)
.add("active_route", documentDto.isActiveRoute())
.add("current_step_name", JsonUtil.nullable(documentDto.getCurrentStepName()))
.add("file_count", documentDto.getFileCount())
.add("tags", tags);
.add("highlight", JsonUtil.nullable(documentDto.getHighlight()))
.add("file_count", filesCount)
.add("tags", createTagsArrayBuilder(tagDtoList));
if (Boolean.TRUE == files) {
JsonArrayBuilder filesArrayBuilder = Json.createArrayBuilder();
// Find files matching the document
Collection<File> filesOfDocument = CollectionUtils.select(filesList, file -> file.getDocumentId().equals(documentDto.getId()));
for (File fileDb : filesOfDocument) {
filesArrayBuilder.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}
@ -492,7 +541,7 @@ public class DocumentResource extends BaseResource {
response.add("total", paginatedList.getResultCount())
.add("documents", documents)
.add("suggestions", suggestions);
return Response.ok().entity(response.build()).build();
}
@ -521,188 +570,44 @@ public class DocumentResource extends BaseResource {
@FormParam("sort_column") Integer sortColumn,
@FormParam("asc") Boolean asc,
@FormParam("search") String search,
@FormParam("files") Boolean files) {
return list(limit, offset, sortColumn, asc, search, files);
}
/**
* Parse a query according to the specified syntax, eg.:
* tag:assurance tag:other before:2012 after:2011-09 shared:yes lang:fra thing
*
* @param search Search query
* @return DocumentCriteria
*/
private DocumentCriteria parseSearchQuery(String search) {
DocumentCriteria documentCriteria = new DocumentCriteria();
if (Strings.isNullOrEmpty(search)) {
return documentCriteria;
}
TagDao tagDao = new TagDao();
List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria().setTargetIdList(getTargetIdList(null)), null);
UserDao userDao = new UserDao();
String[] criteriaList = search.split(" +");
List<String> query = new ArrayList<>();
List<String> fullQuery = new ArrayList<>();
for (String criteria : criteriaList) {
String[] params = criteria.split(":");
if (params.length != 2 || Strings.isNullOrEmpty(params[0]) || Strings.isNullOrEmpty(params[1])) {
// This is not a special criteria, do a fulltext search on it
fullQuery.add(criteria);
continue;
}
String paramName = params[0];
String paramValue = params[1];
switch (paramName) {
case "tag":
case "!tag":
// New tag criteria
List<TagDto> tagDtoList = TagUtil.findByName(paramValue, allTagDtoList);
if (tagDtoList.isEmpty()) {
// No tag found, the request must return nothing
documentCriteria.getTagIdList().add(Lists.newArrayList(UUID.randomUUID().toString()));
} else {
List<String> tagIdList = Lists.newArrayList();
for (TagDto tagDto : tagDtoList) {
tagIdList.add(tagDto.getId());
List<TagDto> childrenTagDtoList = TagUtil.findChildren(tagDto, allTagDtoList);
for (TagDto childrenTagDto : childrenTagDtoList) {
tagIdList.add(childrenTagDto.getId());
}
}
if (paramName.startsWith("!")) {
documentCriteria.getExcludedTagIdList().add(tagIdList);
} else {
documentCriteria.getTagIdList().add(tagIdList);
}
}
break;
case "after":
case "before":
case "uafter":
case "ubefore":
// New date span criteria
try {
boolean isUpdated = paramName.startsWith("u");
DateTime date = DATE_FORMATTER.parseDateTime(paramValue);
if (paramName.endsWith("before")) {
if (isUpdated) documentCriteria.setUpdateDateMax(date.toDate());
else documentCriteria.setCreateDateMax(date.toDate());
} else {
if (isUpdated) documentCriteria.setUpdateDateMin(date.toDate());
else documentCriteria.setCreateDateMin(date.toDate());
}
} catch (IllegalArgumentException e) {
// Invalid date, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
break;
case "uat":
case "at":
// New specific date criteria
boolean isUpdated = params[0].startsWith("u");
try {
switch (paramValue.length()) {
case 10: {
DateTime date = DATE_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusDays(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusDays(1).minusSeconds(1).toDate());
}
break;
}
case 7: {
DateTime date = MONTH_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
}
break;
}
case 4: {
DateTime date = YEAR_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusYears(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusYears(1).minusSeconds(1).toDate());
}
break;
} default: {
// Invalid format, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
}
} catch (IllegalArgumentException e) {
// Invalid date, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
break;
case "shared":
// New shared state criteria
documentCriteria.setShared(paramValue.equals("yes"));
break;
case "lang":
// New language criteria
if (Constants.SUPPORTED_LANGUAGES.contains(paramValue)) {
documentCriteria.setLanguage(paramValue);
} else {
// Unsupported language, returns no documents
documentCriteria.setLanguage(UUID.randomUUID().toString());
}
break;
case "mime":
// New mime type criteria
documentCriteria.setMimeType(paramValue);
break;
case "by":
// New creator criteria
User user = userDao.getActiveByUsername(paramValue);
if (user == null) {
// This user doesn't exist, return nothing
documentCriteria.setCreatorId(UUID.randomUUID().toString());
} else {
// This user exists, search its documents
documentCriteria.setCreatorId(user.getId());
}
break;
case "workflow":
// New shared state criteria
documentCriteria.setActiveRoute(paramValue.equals("me"));
break;
case "simple":
// New simple search criteria
query.add(paramValue);
break;
case "full":
// New fulltext search criteria
fullQuery.add(paramValue);
break;
case "title":
// New title criteria
documentCriteria.getTitleList().add(paramValue);
break;
default:
fullQuery.add(criteria);
break;
}
}
documentCriteria.setSearch(Joiner.on(" ").join(query));
documentCriteria.setFullSearch(Joiner.on(" ").join(fullQuery));
return documentCriteria;
@FormParam("files") Boolean files,
@FormParam("search[after]") String searchCreatedAfter,
@FormParam("search[before]") String searchCreatedBefore,
@FormParam("search[by]") String searchBy,
@FormParam("search[full]") String searchFull,
@FormParam("search[lang]") String searchLang,
@FormParam("search[mime]") String searchMime,
@FormParam("search[shared]") Boolean searchShared,
@FormParam("search[simple]") String searchSimple,
@FormParam("search[tag]") String searchTag,
@FormParam("search[nottag]") String searchTagNot,
@FormParam("search[title]") String searchTitle,
@FormParam("search[uafter]") String searchUpdatedAfter,
@FormParam("search[ubefore]") String searchUpdatedBefore,
@FormParam("search[searchworkflow]") String searchWorkflow
) {
return list(
limit,
offset,
sortColumn,
asc,
search,
files,
searchCreatedAfter,
searchCreatedBefore,
searchBy,
searchFull,
searchLang,
searchMime,
searchShared,
searchSimple,
searchTag,
searchTagNot,
searchTitle,
searchUpdatedAfter,
searchUpdatedBefore,
searchWorkflow
);
}
/**
@ -772,7 +677,7 @@ public class DocumentResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
// Validate input data
title = ValidationUtil.validateLength(title, "title", 1, 100, false);
language = ValidationUtil.validateLength(language, "language", 3, 7, false);
@ -836,7 +741,7 @@ public class DocumentResource extends BaseResource {
.add("id", document.getId());
return Response.ok().entity(response.build()).build();
}
/**
* Updates the document.
*
@ -858,7 +763,7 @@ public class DocumentResource extends BaseResource {
* @apiParam {String[]} [relations] List of related documents ID
* @apiParam {String[]} [metadata_id] List of metadata ID
* @apiParam {String[]} [metadata_value] List of metadata values
* @apiParam {String} language Language
* @apiParam {String} [language] Language
* @apiParam {Number} [create_date] Create date (timestamp)
* @apiSuccess {String} id Document ID
* @apiError (client) ForbiddenError Access denied or document not writable
@ -894,7 +799,7 @@ public class DocumentResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
// Validate input data
title = ValidationUtil.validateLength(title, "title", 1, 100, false);
language = ValidationUtil.validateLength(language, "language", 3, 7, false);
@ -911,20 +816,20 @@ public class DocumentResource extends BaseResource {
if (language != null && !Constants.SUPPORTED_LANGUAGES.contains(language)) {
throw new ClientException("ValidationError", MessageFormat.format("{0} is not a supported language", language));
}
// Check write permission
AclDao aclDao = new AclDao();
if (!aclDao.checkPermission(id, PermType.WRITE, getTargetIdList(null))) {
throw new ForbiddenClientException();
}
// Get the document
DocumentDao documentDao = new DocumentDao();
Document document = documentDao.getById(id);
if (document == null) {
throw new NotFoundException();
}
// Update the document
document.setTitle(title);
document.setDescription(description);
@ -942,12 +847,12 @@ public class DocumentResource extends BaseResource {
} else {
document.setCreateDate(createDate);
}
documentDao.update(document, principal.getId());
// Update tags
updateTagList(id, tagList);
// Update relations
updateRelationList(id, relationList);
@ -963,7 +868,7 @@ public class DocumentResource extends BaseResource {
documentUpdatedAsyncEvent.setUserId(principal.getId());
documentUpdatedAsyncEvent.setDocumentId(id);
ThreadLocalContext.get().addAsyncEvent(documentUpdatedAsyncEvent);
JsonObjectBuilder response = Json.createObjectBuilder()
.add("id", id);
return Response.ok().entity(response.build()).build();
@ -1098,39 +1003,25 @@ public class DocumentResource extends BaseResource {
throw new NotFoundException();
}
List<File> fileList = fileDao.getByDocumentId(principal.getId(), id);
// Delete the document
documentDao.delete(id, principal.getId());
long totalSize = 0L;
for (File file : fileList) {
// Store the file size to update the quota
java.nio.file.Path storedFile = DirectoryUtil.getStorageDirectory().resolve(file.getId());
try {
totalSize += Files.size(storedFile);
} catch (IOException e) {
// The file doesn't exists on disk, which is weird, but not fatal
}
// Raise file deleted event
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFileId(file.getId());
fileDeletedAsyncEvent.setFileSize(file.getSize());
ThreadLocalContext.get().addAsyncEvent(fileDeletedAsyncEvent);
}
// Update the user quota
UserDao userDao = new UserDao();
User user = userDao.getById(principal.getId());
user.setStorageCurrent(user.getStorageCurrent() - totalSize);
userDao.updateQuota(user);
// Raise a document deleted event
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocumentId(id);
ThreadLocalContext.get().addAsyncEvent(documentDeletedAsyncEvent);
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
@ -1183,4 +1074,27 @@ public class DocumentResource extends BaseResource {
relationDao.updateRelationList(documentId, documentIdSet);
}
}
/**
 * Builds the base JSON representation shared by the document payloads.
 *
 * @param dto Document to serialize
 * @return Builder pre-filled with the common document fields
 */
private JsonObjectBuilder createDocumentObjectBuilder(DocumentDto dto) {
    JsonObjectBuilder builder = Json.createObjectBuilder();
    builder.add("create_date", dto.getCreateTimestamp());
    builder.add("description", JsonUtil.nullable(dto.getDescription()));
    builder.add("file_id", JsonUtil.nullable(dto.getFileId()));
    builder.add("id", dto.getId());
    builder.add("language", dto.getLanguage());
    builder.add("shared", dto.getShared());
    builder.add("title", dto.getTitle());
    builder.add("update_date", dto.getUpdateTimestamp());
    return builder;
}
/**
 * Serializes a list of tags into a JSON array of {id, name, color} objects.
 *
 * @param tagDtoList Tags to serialize
 * @return Array builder containing one object per tag
 */
private static JsonArrayBuilder createTagsArrayBuilder(List<TagDto> tagDtoList) {
    JsonArrayBuilder tagsArray = Json.createArrayBuilder();
    tagDtoList.forEach(tag -> tagsArray.add(Json.createObjectBuilder()
            .add("id", tag.getId())
            .add("name", tag.getName())
            .add("color", tag.getColor())));
    return tagsArray;
}
}

View File

@ -67,8 +67,8 @@ public class FileResource extends BaseResource {
* This resource accepts only multipart/form-data.
* @apiName PutFile
* @apiGroup File
* @apiParam {String} id Document ID
* @apiParam {String} previousFileId ID of the file to replace by this new version
* @apiParam {String} [id] Document ID
* @apiParam {String} [previousFileId] ID of the file to replace by this new version
* @apiParam {String} file File data
* @apiSuccess {String} status Status OK
* @apiSuccess {String} id File ID
@ -390,8 +390,8 @@ public class FileResource extends BaseResource {
* @api {get} /file/list Get files
* @apiName GetFileList
* @apiGroup File
* @apiParam {String} id Document ID
* @apiParam {String} share Share ID
* @apiParam {String} [id] Document ID
* @apiParam {String} [share] Share ID
* @apiSuccess {Object[]} files List of files
* @apiSuccess {String} files.id ID
* @apiSuccess {String} files.processing True if the file is currently processing
@ -442,7 +442,7 @@ public class FileResource extends BaseResource {
/**
* List all versions of a file.
*
* @api {get} /file/id/versions Get versions of a file
* @api {get} /file/:id/versions Get versions of a file
* @apiName GetFileVersions
* @apiGroup File
* @apiParam {String} id File ID
@ -497,7 +497,6 @@ public class FileResource extends BaseResource {
* @apiName DeleteFile
* @apiGroup File
* @apiParam {String} id File ID
* @apiParam {String} share Share ID
* @apiSuccess {String} status Status OK
* @apiError (client) ForbiddenError Access denied
* @apiError (client) NotFound File or document not found
@ -522,21 +521,11 @@ public class FileResource extends BaseResource {
FileDao fileDao = new FileDao();
fileDao.delete(file.getId(), principal.getId());
// Update the user quota
UserDao userDao = new UserDao();
User user = userDao.getById(principal.getId());
java.nio.file.Path storedFile = DirectoryUtil.getStorageDirectory().resolve(id);
try {
user.setStorageCurrent(user.getStorageCurrent() - Files.size(storedFile));
userDao.updateQuota(user);
} catch (IOException e) {
// The file doesn't exists on disk, which is weird, but not fatal
}
// Raise a new file deleted event
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFileId(file.getId());
fileDeletedAsyncEvent.setFileSize(file.getSize());
ThreadLocalContext.get().addAsyncEvent(fileDeletedAsyncEvent);
if (file.getDocumentId() != null) {

View File

@ -313,7 +313,7 @@ public class GroupResource extends BaseResource {
* @return Response
*/
@DELETE
@Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_@\\.]+}")
@Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_@.-]+}")
public Response removeMember(@PathParam("groupName") String groupName,
@PathParam("username") String username) {
if (!authenticate()) {

View File

@ -195,7 +195,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@POST
@Path("{username: [a-zA-Z0-9_@\\.]+}")
@Path("{username: [a-zA-Z0-9_@.-]+}")
public Response update(
@PathParam("username") String username,
@FormParam("password") String password,
@ -470,22 +470,8 @@ public class UserResource extends BaseResource {
UserDao userDao = new UserDao();
userDao.delete(principal.getName(), principal.getId());
// Raise deleted events for documents
for (Document document : documentList) {
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocumentId(document.getId());
ThreadLocalContext.get().addAsyncEvent(documentDeletedAsyncEvent);
}
// Raise deleted events for files (don't bother sending document updated event)
for (File file : fileList) {
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFileId(file.getId());
ThreadLocalContext.get().addAsyncEvent(fileDeletedAsyncEvent);
}
sendDeletionEvents(documentList, fileList);
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
@ -511,7 +497,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@DELETE
@Path("{username: [a-zA-Z0-9_@\\.]+}")
@Path("{username: [a-zA-Z0-9_@.-]+}")
public Response delete(@PathParam("username") String username) {
if (!authenticate()) {
throw new ForbiddenClientException();
@ -551,23 +537,9 @@ public class UserResource extends BaseResource {
// Delete the user
userDao.delete(user.getUsername(), principal.getId());
// Raise deleted events for documents
for (Document document : documentList) {
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocumentId(document.getId());
ThreadLocalContext.get().addAsyncEvent(documentDeletedAsyncEvent);
}
// Raise deleted events for files (don't bother sending document updated event)
for (File file : fileList) {
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFileId(file.getId());
ThreadLocalContext.get().addAsyncEvent(fileDeletedAsyncEvent);
}
sendDeletionEvents(documentList, fileList);
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
@ -591,7 +563,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@POST
@Path("{username: [a-zA-Z0-9_@\\.]+}/disable_totp")
@Path("{username: [a-zA-Z0-9_@.-]+}/disable_totp")
public Response disableTotpUsername(@PathParam("username") String username) {
if (!authenticate()) {
throw new ForbiddenClientException();
@ -713,7 +685,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@GET
@Path("{username: [a-zA-Z0-9_@\\.]+}")
@Path("{username: [a-zA-Z0-9_@.-]+}")
@Produces(MediaType.APPLICATION_JSON)
public Response view(@PathParam("username") String username) {
if (!authenticate()) {
@ -1178,4 +1150,29 @@ public class UserResource extends BaseResource {
}
return null;
}
/**
 * Raises the asynchronous deletion events for a batch of documents and files.
 *
 * @param documentList Documents that were deleted
 * @param fileList Files that were deleted
 */
private void sendDeletionEvents(List<Document> documentList, List<File> fileList) {
    // One deletion event per document
    for (Document document : documentList) {
        DocumentDeletedAsyncEvent documentEvent = new DocumentDeletedAsyncEvent();
        documentEvent.setUserId(principal.getId());
        documentEvent.setDocumentId(document.getId());
        ThreadLocalContext.get().addAsyncEvent(documentEvent);
    }
    // One deletion event per file; document updated events are intentionally not sent here
    for (File file : fileList) {
        FileDeletedAsyncEvent fileEvent = new FileDeletedAsyncEvent();
        fileEvent.setUserId(principal.getId());
        fileEvent.setFileId(file.getId());
        fileEvent.setFileSize(file.getSize());
        ThreadLocalContext.get().addAsyncEvent(fileEvent);
    }
}
}

View File

@ -0,0 +1,318 @@
package com.sismics.docs.rest.util;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.model.jpa.User;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.UUID;
public class DocumentSearchCriteriaUtil {
// Lenient Joda-Time parsers for the three date precisions accepted by the search syntax.
private static final DateTimeParser YEAR_PARSER = DateTimeFormat.forPattern("yyyy").getParser();
private static final DateTimeParser MONTH_PARSER = DateTimeFormat.forPattern("yyyy-MM").getParser();
private static final DateTimeParser DAY_PARSER = DateTimeFormat.forPattern("yyyy-MM-dd").getParser();
// All three precisions combined, used where a date may be given at any granularity.
private static final DateTimeParser[] DATE_PARSERS = new DateTimeParser[]{
YEAR_PARSER,
MONTH_PARSER,
DAY_PARSER};
// Formatters exposing each single precision, and one accepting any of the three.
private static final DateTimeFormatter YEAR_FORMATTER = new DateTimeFormatter(null, YEAR_PARSER);
private static final DateTimeFormatter MONTH_FORMATTER = new DateTimeFormatter(null, MONTH_PARSER);
private static final DateTimeFormatter DAY_FORMATTER = new DateTimeFormatter(null, DAY_PARSER);
private static final DateTimeFormatter DATES_FORMATTER = new DateTimeFormatterBuilder().append(null, DATE_PARSERS).toFormatter();
// Separator for HTTP search parameters carrying several comma-separated values.
private static final String PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR = ",";
// Special value of the workflow criteria meaning "documents with an active route for me".
private static final String WORKFLOW_ME = "me";
/**
 * Parses a human-entered search query into a DocumentCriteria, e.g.:
 * tag:assurance tag:other before:2012 after:2011-09 shared:yes lang:fra thing
 * <p>
 * Tokens of the form name:value are interpreted as filters; any other token
 * (including tokens with an unknown filter name) goes to the fulltext search.
 *
 * @param search Raw search query (may be null or empty)
 * @param allTagDtoList Tags visible to the caller, used to resolve tag filters
 * @return DocumentCriteria built from the query
 */
public static DocumentCriteria parseSearchQuery(String search, List<TagDto> allTagDtoList) {
    DocumentCriteria criteria = new DocumentCriteria();
    if (Strings.isNullOrEmpty(search)) {
        return criteria;
    }

    List<String> simpleTerms = new ArrayList<>();
    List<String> fullTextTerms = new ArrayList<>();
    for (String token : search.split(" +")) {
        String[] parts = token.split(":");
        if (parts.length != 2 || Strings.isNullOrEmpty(parts[0]) || Strings.isNullOrEmpty(parts[1])) {
            // Not a name:value filter, do a fulltext search on the whole token
            fullTextTerms.add(token);
            continue;
        }
        String name = parts[0];
        String value = parts[1];
        switch (name) {
            case "tag":
            case "!tag":
                parseTagCriteria(criteria, value, allTagDtoList, name.startsWith("!"));
                break;
            case "after":
            case "before":
            case "uafter":
            case "ubefore":
                parseDateCriteria(criteria, value, DATES_FORMATTER, name.startsWith("u"), name.endsWith("before"));
                break;
            case "uat":
            case "at":
                parseDateAtCriteria(criteria, value, name.startsWith("u"));
                break;
            case "shared":
                criteria.setShared(value.equals("yes"));
                break;
            case "lang":
                parseLangCriteria(criteria, value);
                break;
            case "mime":
                criteria.setMimeType(value);
                break;
            case "by":
                parseByCriteria(criteria, value);
                break;
            case "workflow":
                criteria.setActiveRoute(value.equals(WORKFLOW_ME));
                break;
            case "simple":
                simpleTerms.add(value);
                break;
            case "full":
                fullTextTerms.add(value);
                break;
            case "title":
                criteria.getTitleList().add(value);
                break;
            default:
                // Unknown filter name, search the whole token in fulltext
                fullTextTerms.add(token);
                break;
        }
    }
    criteria.setSimpleSearch(Joiner.on(" ").join(simpleTerms));
    criteria.setFullSearch(Joiner.on(" ").join(fullTextTerms));
    return criteria;
}
/**
* Fill the document criteria with various possible parameters
*
* @param documentCriteria structure to be filled
* @param searchBy author
* @param searchCreatedAfter creation moment after
* @param searchCreatedBefore creation moment before
* @param searchFull full search
* @param searchLang lang
* @param searchMime mime type
* @param searchShared share state
* @param searchSimple search in
* @param searchTag tags or parent tags
* @param searchNotTag tags or parent tags to ignore
* @param searchTitle title
* @param searchUpdatedAfter update moment after
* @param searchUpdatedBefore update moment before
* @param searchWorkflow exiting workflow
* @param allTagDtoList list of existing tags
*/
public static void addHttpSearchParams(
DocumentCriteria documentCriteria,
String searchBy,
String searchCreatedAfter,
String searchCreatedBefore,
String searchFull,
String searchLang,
String searchMime,
Boolean searchShared,
String searchSimple,
String searchTag,
String searchNotTag,
String searchTitle,
String searchUpdatedAfter,
String searchUpdatedBefore,
String searchWorkflow,
List<TagDto> allTagDtoList
) {
if (searchBy != null) {
parseByCriteria(documentCriteria, searchBy);
}
if (searchCreatedAfter != null) {
parseDateCriteria(documentCriteria, searchCreatedAfter, DAY_FORMATTER, false, false);
}
if (searchCreatedBefore != null) {
parseDateCriteria(documentCriteria, searchCreatedBefore, DAY_FORMATTER, false, true);
}
if (searchFull != null) {
documentCriteria.setFullSearch(Joiner.on(" ").join(searchFull.split(PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR)));
}
if (searchLang != null) {
parseLangCriteria(documentCriteria, searchLang);
}
if (searchMime != null) {
documentCriteria.setMimeType(searchMime);
}
if ((searchShared != null) && searchShared) {
documentCriteria.setShared(true);
}
if (searchSimple != null) {
documentCriteria.setSimpleSearch(Joiner.on(" ").join(searchSimple.split(PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR)));
}
if (searchTitle != null) {
documentCriteria.getTitleList().addAll(Arrays.asList(searchTitle.split(PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR)));
}
if (searchTag != null) {
for (String tag : searchTag.split(PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR)) {
parseTagCriteria(documentCriteria, tag, allTagDtoList, false);
}
}
if (searchNotTag != null) {
for (String tag : searchNotTag.split(PARAMETER_WITH_MULTIPLE_VALUES_SEPARATOR)) {
parseTagCriteria(documentCriteria, tag, allTagDtoList, true);
}
}
if (searchUpdatedAfter != null) {
parseDateCriteria(documentCriteria, searchUpdatedAfter, DAY_FORMATTER, true, false);
}
if (searchUpdatedBefore != null) {
parseDateCriteria(documentCriteria, searchUpdatedBefore, DAY_FORMATTER, true, true);
}
if ((WORKFLOW_ME.equals(searchWorkflow))) {
documentCriteria.setActiveRoute(true);
}
}
private static void parseDateCriteria(DocumentCriteria documentCriteria, String value, DateTimeFormatter formatter, boolean isUpdated, boolean isBefore) {
try {
DateTime date = formatter.parseDateTime(value);
if (isBefore) {
if (isUpdated) {
documentCriteria.setUpdateDateMax(date.toDate());
} else {
documentCriteria.setCreateDateMax(date.toDate());
}
} else {
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
}
}
} catch (IllegalArgumentException e) {
// Invalid date, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
}
private static void parseDateAtCriteria(DocumentCriteria documentCriteria, String value, boolean isUpdated) {
try {
switch (value.length()) {
case 10: {
DateTime date = DATES_FORMATTER.parseDateTime(value);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusDays(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusDays(1).minusSeconds(1).toDate());
}
break;
}
case 7: {
DateTime date = MONTH_FORMATTER.parseDateTime(value);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
}
break;
}
case 4: {
DateTime date = YEAR_FORMATTER.parseDateTime(value);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusYears(1).minusSeconds(1).toDate());
} else {
documentCriteria.setCreateDateMin(date.toDate());
documentCriteria.setCreateDateMax(date.plusYears(1).minusSeconds(1).toDate());
}
break;
}
default: {
// Invalid format, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
}
} catch (IllegalArgumentException e) {
// Invalid date, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
}
private static void parseTagCriteria(DocumentCriteria documentCriteria, String value, List<TagDto> allTagDtoList, boolean exclusion) {
List<TagDto> tagDtoList = TagUtil.findByName(value, allTagDtoList);
if (tagDtoList.isEmpty()) {
// No tag found, the request must return nothing
documentCriteria.getTagIdList().add(Lists.newArrayList(UUID.randomUUID().toString()));
} else {
List<String> tagIdList = Lists.newArrayList();
for (TagDto tagDto : tagDtoList) {
tagIdList.add(tagDto.getId());
List<TagDto> childrenTagDtoList = TagUtil.findChildren(tagDto, allTagDtoList);
for (TagDto childrenTagDto : childrenTagDtoList) {
tagIdList.add(childrenTagDto.getId());
}
}
if (exclusion) {
documentCriteria.getExcludedTagIdList().add(tagIdList);
} else {
documentCriteria.getTagIdList().add(tagIdList);
}
}
}
private static void parseLangCriteria(DocumentCriteria documentCriteria, String value) {
// New language criteria
if (Constants.SUPPORTED_LANGUAGES.contains(value)) {
documentCriteria.setLanguage(value);
} else {
// Unsupported language, returns no documents
documentCriteria.setLanguage(UUID.randomUUID().toString());
}
}
private static void parseByCriteria(DocumentCriteria documentCriteria, String value) {
User user = new UserDao().getActiveByUsername(value);
if (user == null) {
// This user doesn't exist, return nothing
documentCriteria.setCreatorId(UUID.randomUUID().toString());
} else {
// This user exists, search its documents
documentCriteria.setCreatorId(user.getId());
}
}
}

View File

@ -1,8 +1,9 @@
package com.sismics.docs.core.util;
package com.sismics.docs.rest.util;
import com.sismics.docs.core.dao.dto.TagDto;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
@ -39,10 +40,10 @@ public class TagUtil {
* @return List of filtered tags
*/
public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
List<TagDto> tagDtoList = new ArrayList<>();
if (name.isEmpty()) {
return tagDtoList;
return Collections.emptyList();
}
List<TagDto> tagDtoList = new ArrayList<>();
name = name.toLowerCase();
for (TagDto tagDto : allTagDtoList) {
if (tagDto.getName().toLowerCase().startsWith(name)) {

View File

@ -50,11 +50,11 @@ curl -i -X POST -H "Cookie: auth_token=64085630-2ae6-415c-9a92-4b22c107eaa4" htt
## Document search syntax
The `/api/document/list` endpoint use a String `search` parameter.
The `/api/document/list` endpoint uses a String `search` parameter, useful when the query is entered by a human.
This parameter is split in segments using the space character (the other whitespace characters are not considered).
If a segment contains exactly one colon (`:`), it will used as a field criteria (see bellow).
If a segment contains exactly one colon (`:`), it will be used as a field criterion (see below).
In other cases (zero or more than one colon), the segment will be used as a search criteria for all fields including the document's files content.
### Search fields
@ -69,7 +69,7 @@ If a search `VALUE` is considered invalid, the search result will be empty.
* `at:VALUE`: the document must have been created at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `before:VALUE`: the document must have been created before or at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `uafter:VALUE`: the document must have been last updated after or at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `at:VALUE`: the document must have been updated at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `uat:VALUE`: the document must have been updated at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `ubefore:VALUE`: the document must have been updated before or at the `VALUE` moment, accepted format are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* Language
* `lang:VALUE`: the document must be of the specified language (example: `en`)

View File

@ -429,7 +429,7 @@ angular.module('docs',
prefix: 'locale/',
suffix: '.json?@build.date@'
})
.registerAvailableLanguageKeys(['en', 'es', 'pt', 'fr', 'de', 'el', 'ru', 'it', 'pl', 'zh_CN', 'zh_TW'], {
.registerAvailableLanguageKeys(['en', 'es', 'pt', 'fr', 'de', 'el', 'ru', 'it', 'pl', 'zh_CN', 'zh_TW', 'sq_AL'], {
'en_*': 'en',
'es_*': 'es',
'pt_*': 'pt',
@ -547,7 +547,8 @@ angular.module('docs',
{ key: 'dan', label: 'Dansk' },
{ key: 'nor', label: 'Norsk' },
{ key: 'vie', label: 'Tiếng Việt' },
{ key: 'ces', label: 'Czech' }
{ key: 'ces', label: 'Czech' },
{ key: 'sqi', label: 'Shqip' }
];
})
/**

View File

@ -61,7 +61,7 @@ angular.module('share',
prefix: 'locale/',
suffix: '.json?@build.date@'
})
.registerAvailableLanguageKeys(['en', 'es', 'pt', 'fr', 'de', 'el', 'ru', 'it', 'pl', 'zh_CN', 'zh_TW'], {
.registerAvailableLanguageKeys(['en', 'es', 'pt', 'fr', 'de', 'el', 'ru', 'it', 'pl', 'zh_CN', 'zh_TW', 'sq_AL'], {
'en_*': 'en',
'es_*': 'es',
'pt_*': 'pt',

View File

@ -192,6 +192,7 @@
<span ng-switch-when="pl">Polski</span>
<span ng-switch-when="zh_CN">简体中文</span>
<span ng-switch-when="zh_TW">繁體中文</span>
<span ng-switch-when="sq_AL">Shqip</span>
</span>
<span class="caret"></span>
</a>
@ -207,6 +208,7 @@
<li><a href ng-click="changeLanguage('pl')" ng-class="{ 'bg-info': currentLang == 'pl' }">Polski</a></li>
<li><a href ng-click="changeLanguage('zh_CN')" ng-class="{ 'bg-info': currentLang == 'zh_CN' }">简体中文</a></li>
<li><a href ng-click="changeLanguage('zh_TW')" ng-class="{ 'bg-info': currentLang == 'zh_TW' }">繁體中文</a></li>
<li><a href ng-click="changeLanguage('sq_AL')" ng-class="{ 'bg-info': currentLang == 'sq_AL' }">Shqip</a></li>
</ul>
</li>
<li translate="document.default.footer_sismics"></li>

View File

@ -0,0 +1,150 @@
'use strict';
// AngularJS locale data module for Albanian (Albania), locale ID "sq_AL".
// Registers the $locale value consumed by Angular's date/number filters.
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
// Number of digits after the decimal point in n (0 when n has no '.').
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
// Returns {v, f} where v is the number of visible fraction digits (capped at 3
// unless opt_precision is given) and f is those fraction digits as an integer.
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"PD",
"MD"
],
"DAY": [
"E Diel",
"E Hënë",
"E Martë",
"E Mërkurë",
"E Enjte",
"E Premte",
"E Shtunë"
],
"ERANAMES": [
"Para Krishtit",
"Pas Krishtit"
],
"ERAS": [
"p.K.",
"n.K."
],
"FIRSTDAYOFWEEK": 1,
"MONTH": [
"Janar",
"Shkurt",
"Mars",
"Prill",
"Maj",
"Qershor",
"Korrik",
"Gusht",
"Shtator",
"Tetor",
"Nëntor",
"Dhjetor"
],
"SHORTDAY": [
"Die",
"Hën",
"Mar",
"Mër",
"Enj",
"Pre",
"Sht"
],
"SHORTMONTH": [
"Jan",
"Shk",
"Mar",
"Pri",
"Maj",
"Qer",
"Kor",
"Gus",
"Sht",
"Tet",
"Nën",
"Dhj"
],
"STANDALONEMONTH": [
"Janar",
"Shkurt",
"Mars",
"Prill",
"Maj",
"Qershor",
"Korrik",
"Gusht",
"Shtator",
"Tetor",
"Nëntor",
"Dhjetor"
],
"WEEKENDRANGE": [
6,
0
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "yy-MM-dd h:mm a",
"shortDate": "yy-MM-dd",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "Lek",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "sq-al",
"localeID": "sq_AL",
// Plural category selector: ONE for exactly 1 with no fraction digits, OTHER otherwise.
"pluralCat": function(n, opt_precision) {
var i = n | 0;
var vf = getVF(n, opt_precision);
if (i == 1 && vf.v == 0) {
return PLURAL_CATEGORY.ONE;
}
return PLURAL_CATEGORY.OTHER;
}
});
}]);

View File

@ -0,0 +1,640 @@
{
"login": {
"username": "Emri i përdoruesit",
"password": "Fjalëkalimi",
"validation_code_required": "Kërkohet një kod verifikimi",
"validation_code_title": "Ju keni aktivizuar vërtetimin me dy faktorë në llogarinë tuaj. ",
"validation_code": "Kodi i verifikimit",
"remember_me": "Më kujto mua",
"submit": "Hyni",
"login_as_guest": "Identifikohu si i ftuar",
"login_failed_title": "Identifikimi dështoi",
"login_failed_message": "Emri i përdoruesit ose fjalëkalimi është i pavlefshëm",
"password_lost_btn": "Fjalëkalimi i humbur?",
"password_lost_sent_title": "Email për rivendosjen e fjalëkalimit u dërgua",
"password_lost_sent_message": "Një email është dërguar në <strong>{{ username }}</strong> për të rivendosur fjalëkalimin tuaj",
"password_lost_error_title": "Gabim i rivendosjes së fjalëkalimit",
"password_lost_error_message": "Nuk mund të dërgohet një email për rivendosjen e fjalëkalimit, ju lutemi kontaktoni administratorin tuaj për një rivendosje manuale"
},
"passwordlost": {
"title": "Fjalëkalimi ka humbur",
"message": "Ju lutemi shkruani emrin tuaj të përdoruesit për të marrë një lidhje të rivendosjes së fjalëkalimit. ",
"submit": "Rivendos fjalëkalimin tim"
},
"passwordreset": {
"message": "Ju lutemi shkruani një fjalëkalim të ri",
"submit": "Ndrysho fjalëkalimin tim",
"error_title": "Gabim gjatë ndryshimit të fjalëkalimit tuaj",
"error_message": "Kërkesa juaj për rikuperimin e fjalëkalimit ka skaduar, ju lutemi kërkoni një të re në faqen e hyrjes"
},
"index": {
"toggle_navigation": "Ndrysho navigimin",
"nav_documents": "Dokumentet",
"nav_tags": "Etiketa",
"nav_users_groups": "Përdoruesit",
"error_info": "{{ count }} gabim i ri{{ count > 1 ? 's' : '' }}",
"logged_as": "I identifikuar si {{ username }}",
"nav_settings": "Cilësimet",
"logout": "Shkyç",
"global_quota_warning": "<strong>Paralajmërim!</strong> Kuota globale pothuajse arriti në {{ current | number: 0 }}MB ({{ percent | number: 1 }}%) përdoret në {{ total | number: 0 }}MB"
},
"document": {
"navigation_up": "Ngjitu një nivel",
"toggle_navigation": "Ndrysho navigimin e dosjeve",
"display_mode_list": "Shfaq dokumentet në listë",
"display_mode_grid": "Shfaq dokumentet në rrjet",
"search_simple": "Kërkim i thjeshtë",
"search_fulltext": "Kërkimi i tekstit të plotë",
"search_creator": "Krijuesi",
"search_language": "Gjuhe",
"search_before_date": "Krijuar para kësaj date",
"search_after_date": "Krijuar pas kësaj date",
"search_before_update_date": "Përditësuar përpara kësaj date",
"search_after_update_date": "Përditësuar pas kësaj date",
"search_tags": "Etiketa",
"search_shared": "Vetëm dokumente të përbashkëta",
"search_workflow": "Rrjedha e punës më është caktuar",
"search_clear": "Qartë",
"any_language": "Çdo gjuhë",
"add_document": "Shto një dokument",
"import_eml": "Importo nga një email (format EML)",
"tags": "Etiketa",
"no_tags": "Nuk ka etiketa",
"no_documents": "Asnjë dokument në bazën e të dhënave",
"search": "Kërko",
"search_empty": "Nuk ka ndeshje për <strong>\"{{ search }}\"</strong>",
"shared": "Të përbashkëta",
"current_step_name": "Hapi aktual",
"title": "Titulli",
"description": "Përshkrim",
"contributors": "Kontribuesit",
"language": "Gjuhe",
"creation_date": "Data e krijimit",
"subject": "Subjekti",
"identifier": "Identifikues",
"publisher": "Botues",
"format": "Formati",
"source": "Burimi",
"type": "Lloji",
"coverage": "Mbulimi",
"rights": "Të drejtat",
"relations": "Marrëdhëniet",
"page_size": "Madhësia e faqes",
"page_size_10": "10 për faqe",
"page_size_20": "20 për faqe",
"page_size_30": "30 për faqe",
"upgrade_quota": "Për të përmirësuar kuotën tuaj, pyesni administratorin tuaj",
"quota": "{{ current | number: 0 }}MB ({{ percent | number: 1 }}%) përdoret në {{ total | number: 0 }}MB",
"count": "{{ count }} dokument{{ count > 1 ? 's' : '' }} gjetur",
"last_updated": "Përditësimi i fundit {{ date | timeAgo: dateFormat }}",
"view": {
"delete_comment_title": "Fshi komentin",
"delete_comment_message": "Dëshiron vërtet ta fshish këtë koment?",
"delete_document_title": "Fshi dokumentin",
"delete_document_message": "Dëshiron vërtet ta fshish këtë dokument?",
"shared_document_title": "Dokument i përbashkët",
"shared_document_message": "Ju mund ta ndani këtë dokument duke dhënë këtë lidhje. <br/><input class=\"form-control share-link\" type=\"text\" readonly=\"readonly\" value=\"{{ link }}\" onclick=\"this.select(); document.execCommand('copy');\" />",
"not_found": "Dokumenti nuk u gjet",
"forbidden": "Qasja është e ndaluar",
"download_files": "Shkarko skedarët",
"export_pdf": "Eksporto në PDF",
"by_creator": "nga",
"comments": "Komentet",
"no_comments": "Ende nuk ka komente për këtë dokument",
"add_comment": "Shto një koment",
"error_loading_comments": "Gabim gjatë ngarkimit të komenteve",
"workflow_current": "Hapi aktual i rrjedhës së punës",
"workflow_comment": "Shto një koment të rrjedhës së punës",
"workflow_validated_title": "Hapi i rrjedhës së punës u vërtetua",
"workflow_validated_message": "Hapi i rrjedhës së punës është vërtetuar me sukses.",
"content": {
"content": "përmbajtja",
"delete_file_title": "Fshi skedarin",
"delete_file_message": "Dëshiron vërtet ta fshish këtë skedar?",
"upload_pending": "Në pritje...",
"upload_progress": "Po ngarkohet...",
"upload_error": "Gabim ngarkimi",
"upload_error_quota": "Kuota u arrit",
"drop_zone": "Zvarrit",
"add_files": "Shtoni skedarë",
"file_processing_indicator": "Ky skedar është duke u përpunuar. ",
"reprocess_file": "Ripërpunoni këtë skedar",
"upload_new_version": "Ngarko një version të ri",
"open_versions": "Shfaq historikun e versionit",
"display_mode_list": "Shfaq skedarët në listë",
"display_mode_grid": "Shfaq skedarët në rrjet"
},
"workflow": {
"workflow": "Rrjedha e punës",
"message": "Verifikoni ose vërtetoni dokumentet tuaja me njerëzit e organizatës suaj duke përdorur rrjedhat e punës.",
"workflow_start_label": "Cilin rrjedhë pune të filloni?",
"add_more_workflow": "Shto më shumë flukse pune",
"start_workflow_submit": "Filloni rrjedhën e punës",
"full_name": "<strong>{{ name }}</strong> filloi më {{ create_date | date }}",
"cancel_workflow": "Anuloni rrjedhën aktuale të punës",
"cancel_workflow_title": "Anuloni rrjedhën e punës",
"cancel_workflow_message": "Dëshiron vërtet të anulosh rrjedhën aktuale të punës?",
"no_workflow": "Nuk mund të filloni asnjë rrjedhë pune në këtë dokument."
},
"permissions": {
"permissions": "Lejet",
"message": "Lejet mund të aplikohen drejtpërdrejt në këtë dokument, ose mund të vijnë nga <a href=\"#/tag\">etiketa</a>.",
"title": "Lejet për këtë dokument",
"inherited_tags": "Lejet e trashëguara nga etiketat",
"acl_source": "Nga",
"acl_target": "Për",
"acl_permission": "Leja"
},
"activity": {
"activity": "Aktiviteti",
"message": "Çdo veprim në këtë dokument regjistrohet këtu."
}
},
"edit": {
"document_edited_with_errors": "Dokumenti u redaktua me sukses, por disa skedarë nuk mund të ngarkohen",
"document_added_with_errors": "Dokumenti u shtua me sukses, por disa skedarë nuk mund të ngarkohen",
"quota_reached": "Kuota u arrit",
"primary_metadata": "Meta të dhënat primare",
"title_placeholder": "Një emër i dhënë burimit",
"description_placeholder": "Një llogari e burimit",
"new_files": "Skedarë të rinj",
"orphan_files": "{{ count }} dosje{{ count > 1 ? 's' : '' }}",
"additional_metadata": "Meta të dhëna shtesë",
"subject_placeholder": "Tema e burimit",
"identifier_placeholder": "Një referencë e paqartë për burimin brenda një konteksti të caktuar",
"publisher_placeholder": "Një subjekt përgjegjës për vënien në dispozicion të burimit",
"format_placeholder": "Formati i skedarit, mediumi fizik ose dimensionet e burimit",
"source_placeholder": "Një burim i lidhur nga i cili rrjedh burimi i përshkruar",
"uploading_files": "Skedarët po ngarkohen..."
},
"default": {
"upload_pending": "Në pritje...",
"upload_progress": "Po ngarkohet...",
"upload_error": "Gabim ngarkimi",
"upload_error_quota": "Kuota u arrit",
"quick_upload": "Ngarkimi i shpejtë",
"drop_zone": "Zvarrit",
"add_files": "Shtoni skedarë",
"add_new_document": "Shto në dokument të ri",
"latest_activity": "Aktiviteti i fundit",
"footer_sismics": "E punuar me <span class=\"fas fa-heart\"></span> nga <a href=\"https://www.sismics.com\" target=\"_blank\">Sizmike</a>",
"api_documentation": "Dokumentacioni API",
"feedback": "Na jepni një koment",
"workflow_document_list": "Dokumentet e caktuara për ju",
"select_all": "Selektoj të gjitha",
"select_none": "Zgjidh asnjë"
},
"pdf": {
"export_title": "Eksporto në PDF",
"export_metadata": "Eksporto të dhëna meta",
"export_comments": "Eksporto komente",
"fit_to_page": "Përshtat imazhin në faqe",
"margin": "Marzhi",
"millimeter": "mm"
},
"share": {
"title": "Ndani dokumentin",
"message": "Emërtoni ndarjen nëse dëshironi të ndani disa herë të njëjtin dokument.",
"submit": "Shpërndaje"
}
},
"file": {
"view": {
"previous": "E mëparshme",
"next": "Tjetra",
"not_found": "Skedari nuk u gjet"
},
"edit": {
"title": "Redakto skedarin",
"name": "Emri i skedarit"
},
"versions": {
"title": "Historia e versionit",
"filename": "Emri i skedarit",
"mimetype": "Lloji",
"create_date": "Data e krijimit",
"version": "Version"
}
},
"tag": {
"new_tag": "Etiketë e re",
"search": "Kërko",
"default": {
"title": "Etiketa",
"message_1": "<strong>Etiketa</strong> janë etiketa të lidhura me dokumentet.",
"message_2": "Një dokument mund të etiketohet me etiketa të shumta dhe një etiketë mund të aplikohet në dokumente të shumta.",
"message_3": "Duke perdorur <span class=\"fas fa-pencil-alt\"></span> butonin, ju mund të modifikoni lejet në një etiketë.",
"message_4": "Nëse një etiketë mund të lexohet nga një përdorues ose grup tjetër, dokumentet shoqëruese mund të lexohen gjithashtu nga ata njerëz.",
"message_5": "Për shembull, etiketoni dokumentet e kompanisë suaj me një etiketë <span class=\"label label-info\">Kompania ime</span> dhe shtoni lejen <strong>Mund të lexojë</strong> në një grup <span class=\"btn btn-default\">punonjësit</span>"
},
"edit": {
"delete_tag_title": "Fshi etiketën",
"delete_tag_message": "Dëshiron vërtet ta fshish këtë etiketë?",
"name": "Emri",
"color": "Ngjyrë",
"parent": "Prindi",
"info": "Lejet për këtë etiketë do të zbatohen gjithashtu për dokumentet e etiketuara <span class=\"label label-info\" ng-style=\"{ 'background': color }\">{{ name }}</span>",
"circular_reference_title": "Referencë rrethore",
"circular_reference_message": "Hierarkia e etiketave prind krijon një lak, ju lutemi zgjidhni një prind tjetër."
}
},
"group": {
"profile": {
"members": "Anëtarët",
"no_members": "Asnjë anëtar",
"related_links": "Lidhje të ngjashme",
"edit_group": "Redakto {{ name }} grup"
}
},
"user": {
"profile": {
"groups": "Grupet",
"quota_used": "Kuota e përdorur",
"percent_used": "{{ percent | number: 0 }}% e përdorur",
"related_links": "Lidhje të ngjashme",
"document_created": "Dokumentet e krijuara nga {{ username }}",
"edit_user": "Redakto {{ username }} përdorues"
}
},
"usergroup": {
"search_groups": "Kërkoni në grupe",
"search_users": "Kërkoni në përdoruesit",
"you": "je ti!",
"default": {
"title": "Përdoruesit",
"message": "Këtu mund të shikoni informacione rreth përdoruesve dhe grupeve."
}
},
"settings": {
"menu_personal_settings": "Cilësimet personale",
"menu_user_account": "Llogaria e përdoruesit",
"menu_two_factor_auth": "Autentifikimi me dy faktorë",
"menu_opened_sessions": "Seancat e hapura",
"menu_file_importer": "Importuesi i skedarëve në masë",
"menu_general_settings": "Cilësimet e përgjithshme",
"menu_workflow": "Rrjedha e punës",
"menu_users": "Përdoruesit",
"menu_groups": "Grupet",
"menu_vocabularies": "Fjalorët",
"menu_configuration": "Konfigurimi",
"menu_inbox": "Skanimi i kutisë hyrëse",
"menu_ldap": "Autentifikimi LDAP",
"menu_metadata": "Meta të dhëna të personalizuara",
"menu_monitoring": "Monitorimi",
"ldap": {
"title": "Autentifikimi LDAP",
"enabled": "Aktivizo vërtetimin LDAP",
"host": "Emri i hostit LDAP",
"port": "Porta LDAP (389 si parazgjedhje)",
"usessl": "Aktivizo SSL (ldaps)",
"admin_dn": "Admin DN",
"admin_password": "Fjalëkalimi i administratorit",
"base_dn": "Kërkimi bazë DN",
"filter": "Filtri i kërkimit (duhet të përmbajë USERNAME, p.sh. \"(uid=USERNAME)\")",
"default_email": "Email-i i parazgjedhur për përdoruesin LDAP",
"default_storage": "Hapësira ruajtëse e paracaktuar për përdoruesin LDAP",
"saved": "Konfigurimi LDAP u ruajt me sukses"
},
"user": {
"title": "Menaxhimi i përdoruesve",
"add_user": "Shto një përdorues",
"username": "Emri i përdoruesit",
"create_date": "Krijo datë",
"totp_enabled": "Për këtë llogari është aktivizuar vërtetimi me dy faktorë",
"edit": {
"delete_user_title": "Fshi përdoruesin",
"delete_user_message": "Dëshiron vërtet ta fshish këtë përdorues? ",
"user_used_title": "Përdoruesi në përdorim",
"user_used_message": "Ky përdorues përdoret në rrjedhën e punës \"{{ name }}\"",
"edit_user_failed_title": "Përdoruesi ekziston tashmë",
"edit_user_failed_message": "Ky emër përdoruesi është marrë tashmë nga një përdorues tjetër",
"edit_user_title": "Redakto \"{{ username }}\"",
"add_user_title": "Shto një përdorues",
"username": "Emri i përdoruesit",
"email": "E-mail",
"groups": "Grupet",
"storage_quota": "Kuota e ruajtjes",
"storage_quota_placeholder": "Kuota e hapësirës ruajtëse (në MB)",
"password": "Fjalëkalimi",
"password_confirm": "Fjalëkalimi (konfirmo)",
"disabled": "Përdorues me aftësi të kufizuara",
"password_reset_btn": "Dërgoni një email për rivendosjen e fjalëkalimit te ky përdorues",
"password_lost_sent_title": "Email për rivendosjen e fjalëkalimit u dërgua",
"password_lost_sent_message": "Është dërguar një email për rivendosjen e fjalëkalimit <strong>{{ username }}</strong>",
"disable_totp_btn": "Çaktivizo vërtetimin me dy faktorë për këtë përdorues",
"disable_totp_title": "Çaktivizo vërtetimin me dy faktorë",
"disable_totp_message": "Jeni i sigurt që dëshironi të çaktivizoni vërtetimin me dy faktorë për këtë përdorues?"
}
},
"workflow": {
"title": "Konfigurimi i rrjedhës së punës",
"add_workflow": "Shto një rrjedhë pune",
"name": "Emri",
"create_date": "Krijo datë",
"edit": {
"delete_workflow_title": "Fshi fluksin e punës",
"delete_workflow_message": "Dëshiron vërtet ta fshish këtë rrjedhë pune? ",
"edit_workflow_title": "Redakto \"{{ name }}\"",
"add_workflow_title": "Shto një rrjedhë pune",
"name": "Emri",
"name_placeholder": "Emri ose përshkrimi i hapit",
"drag_help": "Zvarrit dhe lësho për të rirenditur hapin",
"type": "Lloji i hapit",
"type_approve": "Mirato",
"type_validate": "Vërtetoni",
"target": "Caktuar për",
"target_help": "<strong>Mirato:</strong> Pranoni ose refuzoni rishikimin<br/><strong>Vërteto:</strong> Rishikoni dhe vazhdoni rrjedhën e punës",
"add_step": "Shto një hap të rrjedhës së punës",
"actions": "Çfarë ndodh më pas?",
"remove_action": "Hiq veprimin",
"acl_info": "Vetëm përdoruesit dhe grupet e përcaktuara këtu do të mund të fillojnë këtë rrjedhë pune në një dokument"
}
},
"security": {
"enable_totp": "Aktivizo vërtetimin me dy faktorë",
"enable_totp_message": "Sigurohuni që të keni një aplikacion të përputhshëm me TOTP në telefonin tuaj gati për të shtuar një llogari të re",
"title": "Autentifikimi me dy faktorë",
"message_1": "Autentifikimi me dy faktorë ju lejon të shtoni një shtresë sigurie në tuaj <strong>{{ appName }}</strong> llogari.<br/>Përpara se të aktivizoni këtë veçori, sigurohuni që të keni një aplikacion të pajtueshëm me TOTP në telefonin tuaj:",
"message_google_authenticator": "Për Android, iOS dhe Blackberry: <a href=\"https://support.google.com/accounts/answer/1066447\" target=\"_blank\">Google Authenticator</a>",
"message_duo_mobile": "Për Android dhe iOS: <a href=\"https://guide.duo.com/third-party-accounts\" target=\"_blank\">Duo Mobile</a>",
"message_authenticator": "Për Windows Phone: <a href=\"https://www.microsoft.com/en-US/store/apps/Authenticator/9WZDNCRFJ3RJ\" target=\"_blank\">Vërtetuesi</a>",
"message_2": "Këto aplikacione gjenerojnë automatikisht një kod verifikimi që ndryshon pas një periudhe të caktuar kohe.<br/>Do t'ju kërkohet të vendosni këtë kod verifikimi sa herë që identifikoheni <strong>{{ appName }}</strong>.",
"secret_key": "Çelësi juaj sekret është: <strong>{{ secret }}</strong>",
"secret_key_warning": "Konfiguro aplikacionin tënd TOTP në telefonin tënd me këtë çelës sekret tani, nuk do të mund ta qasesh më vonë.",
"totp_enabled_message": "Autentifikimi me dy faktorë është aktivizuar në llogarinë tuaj.<br/>Sa herë që identifikoheni <strong>{{ appName }}</strong>, do t'ju kërkohet një kod verifikimi nga aplikacioni i telefonit tuaj të konfiguruar.<br/>Nëse e humbni telefonin, nuk do të jeni në gjendje të identifikoheni në llogarinë tuaj, por seancat aktive do t'ju lejojnë të rigjeneroni një çelës sekret.",
"disable_totp": {
"disable_totp": "Çaktivizo vërtetimin me dy faktorë",
"message": "Llogaria juaj nuk do të mbrohet më nga vërtetimi me dy faktorë.",
"confirm_password": "Konfirmoni fjalëkalimin tuaj",
"submit": "Çaktivizo vërtetimin me dy faktorë"
},
"test_totp": "Ju lutemi shkruani kodin e vërtetimit të shfaqur në telefonin tuaj:",
"test_code_success": "Kodi i verifikimit në rregull",
"test_code_fail": "Ky kod nuk është i vlefshëm, ju lutemi kontrolloni dy herë nëse telefoni juaj është i konfiguruar siç duhet ose çaktivizoni vërtetimin me dy faktorë"
},
"group": {
"title": "Menaxhimi i grupeve",
"add_group": "Shto një grup",
"name": "Emri",
"edit": {
"delete_group_title": "Fshi grupin",
"delete_group_message": "Dëshiron vërtet ta fshish këtë grup?",
"edit_group_failed_title": "Grupi tashmë ekziston",
"edit_group_failed_message": "Ky emër grupi është marrë tashmë nga një grup tjetër",
"group_used_title": "Grupi në përdorim",
"group_used_message": "Ky grup përdoret në rrjedhën e punës \"{{ name }}\"",
"edit_group_title": "Redakto \"{{ name }}\"",
"add_group_title": "Shto një grup",
"name": "Emri",
"parent_group": "Grupi i prindërve",
"search_group": "Kërkoni një grup",
"members": "Anëtarët",
"new_member": "Anëtar i ri",
"search_user": "Kërkoni një përdorues"
}
},
"account": {
"title": "Llogaria e përdoruesit",
"password": "Fjalëkalimi",
"password_confirm": "Fjalëkalimi (konfirmo)",
"updated": "Llogaria u përditësua me sukses"
},
"config": {
"title_guest_access": "Qasja e mysafirëve",
"message_guest_access": "Qasja e mysafirëve është një mënyrë ku çdokush mund të hyjë {{ appName }} pa fjalëkalim.<br/>Ashtu si një përdorues normal, përdoruesi mysafir mund të qaset vetëm në dokumentet e tij dhe ato të aksesueshme përmes lejeve.<br/>",
"enable_guest_access": "Aktivizo qasjen e vizitorëve",
"disable_guest_access": "Çaktivizo qasjen e vizitorëve",
"title_theme": "Personalizimi i temës",
"title_general": "Konfigurimi i përgjithshëm",
"default_language": "Gjuha e parazgjedhur për dokumentet e reja",
"application_name": "Emri i aplikacionit",
"main_color": "Ngjyra kryesore",
"custom_css": "CSS e personalizuar",
"custom_css_placeholder": "CSS e personalizuar për t'u shtuar pas fletës kryesore të stilit",
"logo": "Logo (madhësia katrore)",
"background_image": "Imazhi i sfondit",
"uploading_image": "Po ngarkon imazhin...",
"title_smtp": "Konfigurimi i emailit",
"smtp_hostname": "Emri i hostit SMTP",
"smtp_port": "Porta SMTP",
"smtp_from": "E-mail i dërguesit",
"smtp_username": "Emri i përdoruesit SMTP",
"smtp_password": "Fjalëkalimi SMTP",
"smtp_updated": "Konfigurimi SMTP u përditësua me sukses",
"webhooks": "Uebhooks",
"webhooks_explain": "Webhooks do të thirren kur të ndodhë ngjarja e specifikuar. ",
"webhook_event": "Ngjarja",
"webhook_url": "URL",
"webhook_create_date": "Krijo datë",
"webhook_add": "Shto një uebhook"
},
"metadata": {
"title": "Konfigurimi i personalizuar i meta të dhënave",
"message": "Këtu mund të shtoni meta të dhëna të personalizuara në dokumentet tuaja si një identifikues i brendshëm ose një datë skadimi. ",
"name": "Emri i meta të dhënave",
"type": "Lloji i meta të dhënave"
},
"inbox": {
"title": "Skanimi i kutisë hyrëse",
"message": "Duke aktivizuar këtë veçori, sistemi do të skanojë kutinë hyrëse të specifikuar çdo minutë <strong>i palexuar</strong> emailet dhe i importoni automatikisht.<br/>Pas importimit të një emaili, ai do të shënohet si i lexuar.<br/>Cilësimet e konfigurimit për <a href=\"https://support.google.com/mail/answer/7126229?hl=en\" target=\"_blank\">Gmail</a>, <a href=\"https://support.office.com/en-us/article/pop-imap-and-smtp-settings-for-outlook-com-d088b986-291d-42b8-9564-9c414e2aa040\" target=\"_blank\">Outlook.com</a>, <a href=\"https://help.yahoo.com/kb/SLN4075.html\" target=\"_blank\">Yahoo</a>.",
"enabled": "Aktivizo skanimin e kutisë hyrëse",
"hostname": "Emri i hostit IMAP",
"port": "Porta IMAP (143 ose 993)",
"starttls": "Aktivizo STARTTLS",
"username": "Emri i përdoruesit IMAP",
"password": "Fjalëkalimi IMAP",
"folder": "Dosja IMAP",
"tag": "Etiketa u shtua në dokumentet e importuara",
"test": "Testoni parametrat",
"last_sync": "Sinkronizimi i fundit: {{ data.date | date: 'medium' }}, {{ data.count }} mesazh{{ data.count > 1 ? 'e' : '' }} të importuara",
"test_success": "Lidhja me kutinë hyrëse është e suksesshme ({{ count }} <strong>i palexuar</strong> mesazh{{ count > 1 ? 'e' : '' }})",
"test_fail": "Ndodhi një gabim gjatë lidhjes me kutinë hyrëse, ju lutemi kontrolloni parametrat",
"saved": "Konfigurimi IMAP u ruajt me sukses",
"autoTagsEnabled": "Shtoni automatikisht etiketat nga rreshti i subjektit të shënuar me",
"deleteImported": "Fshi mesazhin nga kutia postare pas importimit"
},
"monitoring": {
"background_tasks": "Detyrat e sfondit",
"queued_tasks": "Aktualisht ka {{ count }} detyrat në radhë.",
"queued_tasks_explain": "Përpunimi i skedarëve, krijimi i miniaturave, përditësimi i indeksit, njohja optike e karaktereve janë detyra në sfond. ",
"server_logs": "Regjistrat e serverit",
"log_date": "Data",
"log_tag": "Etiketë",
"log_message": "Mesazh",
"indexing": "Indeksimi",
"indexing_info": "Nëse vëreni mospërputhje në rezultatet e kërkimit, mund të provoni të bëni një riindeksim të plotë. ",
"start_reindexing": "Filloni riindeksimin e plotë",
"reindexing_started": "Ri-indeksimi filloi, ju lutemi prisni derisa të mos ketë më detyra në sfond."
},
"session": {
"title": "Seancat e hapura",
"created_date": "Data e krijimit",
"last_connection_date": "Data e fundit e lidhjes",
"user_agent": "Nga",
"current": "Aktuale",
"current_session": "Ky është sesioni aktual",
"clear_message": "Të gjitha pajisjet e tjera të lidhura me këtë llogari do të shkëputen",
"clear": "Pastro të gjitha seancat e tjera"
},
"vocabulary": {
"title": "Shënimet e fjalorit",
"choose_vocabulary": "Zgjidhni një fjalor për të redaktuar",
"type": "Lloji",
"coverage": "Mbulimi",
"rights": "Të drejtat",
"value": "Vlera",
"order": "Rendit",
"new_entry": "Hyrje e re"
},
"fileimporter": {
"title": "Importuesi i skedarëve në masë",
"advanced_users": "Për përdoruesit e avancuar!",
"need_intro": "Nëse ju duhet:",
"need_1": "Importoni një direktori skedarësh menjëherë",
"need_2": "Skanoni një drejtori për skedarë të rinj dhe importojini ato",
"line_1": "Shkoni në <a href=\"https://github.com/sismics/docs/releases\">sismics/docs/releases</a> dhe shkarkoni mjetin e importuesit të skedarëve për sistemin tuaj.",
"line_2": "Ndiq <a href=\"https://github.com/sismics/docs/tree/master/docs-importer\">udhëzime këtu</a> për të përdorur këtë mjet.",
"line_3": "Skedarët tuaj do të importohen në dokumente sipas konfigurimit të importuesit të skedarëve.",
"download": "Shkarko",
"instructions": "Udhëzimet"
}
},
"feedback": {
"title": "Na jepni një koment",
"message": "Ndonjë sugjerim apo pyetje në lidhje me Teedy? ",
"sent_title": "Komentet u dërguan",
"sent_message": "Faleminderit për komentin tuaj! "
},
"import": {
"title": "Importimi",
"error_quota": "U arrit kufiri i kuotës, kontaktoni administratorin tuaj për të rritur kuotën tuaj",
"error_general": "Ndodhi një gabim gjatë përpjekjes për të importuar skedarin tuaj, ju lutemi sigurohuni që ai është një skedar i vlefshëm EML"
},
"app_share": {
"403": {
"title": "I pa autorizuar",
"message": "Dokumenti që po përpiqeni të shikoni nuk ndahet më"
},
"main": "Kërkoni një lidhje të përbashkët të dokumentit për të hyrë në të"
},
"directive": {
"acledit": {
"acl_target": "Për",
"acl_permission": "Leja",
"add_permission": "Shto një leje",
"search_user_group": "Kërkoni një përdorues ose grup"
},
"auditlog": {
"log_created": "krijuar",
"log_updated": "përditësuar",
"log_deleted": "fshihet",
"Acl": "ACL",
"Comment": "Komentoni",
"Document": "Dokumenti",
"File": "Skedari",
"Group": "Grupi",
"Route": "Rrjedha e punës",
"RouteModel": "Modeli i rrjedhës së punës",
"Tag": "Etiketë",
"User": "Përdoruesi",
"Webhook": "Uebhook"
},
"selectrelation": {
"typeahead": "Shkruani një titull dokumenti"
},
"selecttag": {
"typeahead": "Shkruani një etiketë"
},
"datepicker": {
"current": "Sot",
"clear": "Pastro",
"close": "U krye"
}
},
"filter": {
"filesize": {
"mb": "MB",
"kb": "kB"
}
},
"acl": {
"READ": "Mund të lexojë",
"READWRITE": "Mund të shkruajë",
"WRITE": "Mund të shkruajë",
"USER": "Përdoruesi",
"GROUP": "Grupi",
"SHARE": "Të përbashkëta"
},
"workflow_type": {
"VALIDATE": "Vleresimi",
"APPROVE": "Miratimi"
},
"workflow_transition": {
"APPROVED": "Miratuar",
"REJECTED": "Refuzuar",
"VALIDATED": "E vërtetuar"
},
"validation": {
"required": "E detyrueshme",
"too_short": "Shumë e shkurtër",
"too_long": "Shumë gjatë",
"email": "Duhet të jetë një e-mail i vlefshëm",
"password_confirm": "Fjalëkalimi dhe konfirmimi i fjalëkalimit duhet të përputhen",
"number": "Numri i kërkuar",
"no_space": "Hapësirat dhe dy pikat nuk lejohen",
"alphanumeric": "Lejohen vetëm shkronja dhe numra"
},
"action_type": {
"ADD_TAG": "Shto një etiketë",
"REMOVE_TAG": "Hiq një etiketë",
"PROCESS_FILES": "Përpunoni skedarët"
},
"pagination": {
"previous": "E mëparshme",
"next": "Tjetra",
"first": "Së pari",
"last": "E fundit"
},
"onboarding": {
"step1": {
"title": "Hera e parë?",
"description": "Nëse është hera juaj e parë në Teedy, klikoni butonin Next, përndryshe mos ngurroni të më mbyllni."
},
"step2": {
"title": "Dokumentet",
"description": "Teedy është i organizuar në dokumente dhe çdo dokument përmban skedarë të shumtë."
},
"step3": {
"title": "Skedarët",
"description": "Mund të shtoni skedarë pas krijimit të një dokumenti ose përpara se të përdorni këtë zonë të ngarkimit të shpejtë."
},
"step4": {
"title": "Kërko",
"description": "Kjo është mënyra kryesore për të gjetur përsëri dokumentet tuaja. "
},
"step5": {
"title": "Etiketa",
"description": "Dokumentet mund të organizohen në etiketa (të cilat janë si super-dosje). "
}
},
"yes": "Po",
"no": "Jo",
"ok": "Në rregull",
"cancel": "Anulo",
"share": "Shpërndaje",
"unshare": "Ndalo shpërndarjen",
"close": "Mbylle",
"add": "Shtoni",
"open": "Hapur",
"see": "Shiko",
"save": "Ruaj",
"export": "Eksporto",
"edit": "Redakto",
"delete": "Fshije",
"rename": "Riemërto",
"download": "Shkarko",
"loading": "Po ngarkohet...",
"send": "Dërgo",
"enabled": "Aktivizuar",
"disabled": "Çaktivizuar"
}

View File

@ -9,7 +9,7 @@
<label class="col-sm-2 control-label" for="inputUserUsername">{{ 'settings.user.edit.username' | translate }}</label>
<div class="col-sm-7">
<input name="userUsername" type="text" id="inputUserUsername" required ng-disabled="isEdit()" class="form-control"
ng-pattern="/^[a-zA-Z0-9_@\.]*$/"
ng-pattern="/^[a-zA-Z0-9_@.-]*$/"
ng-minlength="3" ng-maxlength="50" ng-attr-placeholder="{{ 'settings.user.edit.username' | translate }}" ng-model="user.username"/>
</div>

View File

@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="renderer" content="webkit" />
<link rel="shortcut icon" href="favicon.png" />
<link rel="shortcut icon" href="../api/theme/image/logo" />
<!-- ref:css style/style.min.css?@build.date@ -->
<link rel="stylesheet" href="style/bootstrap.css" type="text/css" />
<link rel="stylesheet" href="style/fontawesome.css" type="text/css" />
@ -102,4 +102,4 @@
</div>
</div>
</body>
</html>
</html>

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=28
db.version=30

View File

@ -0,0 +1,41 @@
package com.sismics.docs.rest;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.jpa.EMF;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import org.junit.After;
import org.junit.Before;
/**
 * Base class for tests that run inside a single JPA transaction.
 * <p>
 * Before each test a fresh {@link EntityManager} is created, bound to the
 * thread-local context, and a transaction is opened; after each test that
 * transaction is rolled back, so nothing written by the test is persisted.
 */
public abstract class BaseTransactionalTest {
    @Before
    public void setUp() {
        // Bind a new entity manager to the current thread and open a transaction
        EntityManager entityManager = EMF.get().createEntityManager();
        ThreadLocalContext.get().setEntityManager(entityManager);
        entityManager.getTransaction().begin();
    }

    @After
    public void tearDown() {
        // Discard every change made during the test
        ThreadLocalContext.get().getEntityManager().getTransaction().rollback();
    }

    /**
     * Creates and persists a test user with fixed credentials.
     *
     * @param userName username; also used as the audit/principal id passed to the DAO
     * @return the persisted user
     * @throws Exception if the DAO rejects the creation
     */
    protected User createUser(String userName) throws Exception {
        User testUser = new User();
        testUser.setUsername(userName);
        testUser.setPassword("12345678");
        testUser.setEmail("toto@docs.com");
        testUser.setRoleId("admin");
        testUser.setStorageQuota(100_000L);
        new UserDao().create(testUser, userName);
        return testUser;
    }
}

View File

@ -1,19 +1,30 @@
package com.sismics.docs.rest;
import java.io.File;
import com.google.common.io.Resources;
import com.icegreen.greenmail.util.GreenMail;
import com.icegreen.greenmail.util.GreenMailUtil;
import com.icegreen.greenmail.util.ServerSetup;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.junit.Assert;
import org.junit.Test;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.core.Form;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.Response.Status;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
import org.apache.directory.server.core.factory.DirectoryServiceFactory;
import org.apache.directory.server.core.partition.impl.avl.AvlPartition;
import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.protocol.shared.store.LdifFileLoader;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.junit.Assert;
import org.junit.Test;
/**
@ -28,7 +39,7 @@ public class TestAppResource extends BaseJerseyTest {
@Test
public void testAppResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Check the application info
JsonObject json = target().path("/app").request()
@ -86,7 +97,7 @@ public class TestAppResource extends BaseJerseyTest {
@Test
public void testLogResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Check the logs (page 1)
JsonObject json = target().path("/app/log")
@ -120,7 +131,7 @@ public class TestAppResource extends BaseJerseyTest {
@Test
public void testGuestLogin() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Try to login as guest
Response response = target().path("/user/login").request()
@ -185,7 +196,7 @@ public class TestAppResource extends BaseJerseyTest {
@Test
public void testSmtpConfiguration() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Get SMTP configuration
JsonObject json = target().path("/app/config_smtp").request()
@ -224,7 +235,7 @@ public class TestAppResource extends BaseJerseyTest {
@Test
public void testInbox() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Create a tag
JsonObject json = target().path("/tag").request()
@ -340,89 +351,90 @@ public class TestAppResource extends BaseJerseyTest {
*/
@Test
public void testLdapAuthentication() throws Exception {
// // Start LDAP server
// final DirectoryServiceFactory factory = new DefaultDirectoryServiceFactory();
// factory.init("Test");
//
// final DirectoryService directoryService = factory.getDirectoryService();
// directoryService.getChangeLog().setEnabled(false);
// directoryService.setShutdownHookEnabled(true);
//
// final Partition partition = new AvlPartition(directoryService.getSchemaManager());
// partition.setId("Test");
// partition.setSuffixDn(new Dn(directoryService.getSchemaManager(), "o=TEST"));
// partition.initialize();
// directoryService.addPartition(partition);
//
// final LdapServer ldapServer = new LdapServer();
// ldapServer.setTransports(new TcpTransport("localhost", 11389));
// ldapServer.setDirectoryService(directoryService);
//
// directoryService.startup();
// ldapServer.start();
//
// // Load test data in LDAP
// new LdifFileLoader(directoryService.getAdminSession(), new File(Resources.getResource("test.ldif").getFile()), null).execute();
//
// // Login admin
// String adminToken = clientUtil.login("admin", "admin", false);
//
// // Get the LDAP configuration
// JsonObject json = target().path("/app/config_ldap").request()
// .cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
// .get(JsonObject.class);
// Assert.assertFalse(json.getBoolean("enabled"));
//
// // Change LDAP configuration
// target().path("/app/config_ldap").request()
// .cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
// .post(Entity.form(new Form()
// .param("enabled", "true")
// .param("host", "localhost")
// .param("port", "11389")
// .param("admin_dn", "uid=admin,ou=system")
// .param("admin_password", "secret")
// .param("base_dn", "o=TEST")
// .param("filter", "(&(objectclass=inetOrgPerson)(uid=USERNAME))")
// .param("default_email", "devnull@teedy.io")
// .param("default_storage", "100000000")
// ), JsonObject.class);
//
// // Get the LDAP configuration
// json = target().path("/app/config_ldap").request()
// .cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
// .get(JsonObject.class);
// Assert.assertTrue(json.getBoolean("enabled"));
// Assert.assertEquals("localhost", json.getString("host"));
// Assert.assertEquals(11389, json.getJsonNumber("port").intValue());
// Assert.assertEquals("uid=admin,ou=system", json.getString("admin_dn"));
// Assert.assertEquals("secret", json.getString("admin_password"));
// Assert.assertEquals("o=TEST", json.getString("base_dn"));
// Assert.assertEquals("(&(objectclass=inetOrgPerson)(uid=USERNAME))", json.getString("filter"));
// Assert.assertEquals("devnull@teedy.io", json.getString("default_email"));
// Assert.assertEquals(100000000L, json.getJsonNumber("default_storage").longValue());
//
// // Login with a LDAP user
// String ldapTopen = clientUtil.login("ldap1", "secret", false);
//
// // Check user informations
// json = target().path("/user").request()
// .cookie(TokenBasedSecurityFilter.COOKIE_NAME, ldapTopen)
// .get(JsonObject.class);
// Assert.assertEquals("ldap1@teedy.io", json.getString("email"));
//
// // List all documents
// json = target().path("/document/list")
// .queryParam("sort_column", 3)
// .queryParam("asc", true)
// .request()
// .cookie(TokenBasedSecurityFilter.COOKIE_NAME, ldapTopen)
// .get(JsonObject.class);
// JsonArray documents = json.getJsonArray("documents");
// Assert.assertEquals(0, documents.size());
//
// // Stop LDAP server
// ldapServer.stop();
// directoryService.shutdown();
// Start LDAP server
final DirectoryServiceFactory factory = new DefaultDirectoryServiceFactory();
factory.init("Test");
final DirectoryService directoryService = factory.getDirectoryService();
directoryService.getChangeLog().setEnabled(false);
directoryService.setShutdownHookEnabled(true);
final Partition partition = new AvlPartition(directoryService.getSchemaManager());
partition.setId("Test");
partition.setSuffixDn(new Dn(directoryService.getSchemaManager(), "o=TEST"));
partition.initialize();
directoryService.addPartition(partition);
final LdapServer ldapServer = new LdapServer();
ldapServer.setTransports(new TcpTransport("localhost", 11389));
ldapServer.setDirectoryService(directoryService);
directoryService.startup();
ldapServer.start();
// Load test data in LDAP
new LdifFileLoader(directoryService.getAdminSession(), new File(Resources.getResource("test.ldif").getFile()), null).execute();
// Login admin
String adminToken = adminToken();
// Get the LDAP configuration
JsonObject json = target().path("/app/config_ldap").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
.get(JsonObject.class);
Assert.assertFalse(json.getBoolean("enabled"));
// Change LDAP configuration
target().path("/app/config_ldap").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
.post(Entity.form(new Form()
.param("enabled", "true")
.param("host", "localhost")
.param("port", "11389")
.param("usessl", "false")
.param("admin_dn", "uid=admin,ou=system")
.param("admin_password", "secret")
.param("base_dn", "o=TEST")
.param("filter", "(&(objectclass=inetOrgPerson)(uid=USERNAME))")
.param("default_email", "devnull@teedy.io")
.param("default_storage", "100000000")
), JsonObject.class);
// Get the LDAP configuration
json = target().path("/app/config_ldap").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
.get(JsonObject.class);
Assert.assertTrue(json.getBoolean("enabled"));
Assert.assertEquals("localhost", json.getString("host"));
Assert.assertEquals(11389, json.getJsonNumber("port").intValue());
Assert.assertEquals("uid=admin,ou=system", json.getString("admin_dn"));
Assert.assertEquals("secret", json.getString("admin_password"));
Assert.assertEquals("o=TEST", json.getString("base_dn"));
Assert.assertEquals("(&(objectclass=inetOrgPerson)(uid=USERNAME))", json.getString("filter"));
Assert.assertEquals("devnull@teedy.io", json.getString("default_email"));
Assert.assertEquals(100000000L, json.getJsonNumber("default_storage").longValue());
// Login with a LDAP user
String ldapTopen = clientUtil.login("ldap1", "secret", false);
// Check user informations
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, ldapTopen)
.get(JsonObject.class);
Assert.assertEquals("ldap1@teedy.io", json.getString("email"));
// List all documents
json = target().path("/document/list")
.queryParam("sort_column", 3)
.queryParam("asc", true)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, ldapTopen)
.get(JsonObject.class);
JsonArray documents = json.getJsonArray("documents");
Assert.assertEquals(0, documents.size());
// Stop LDAP server
ldapServer.stop();
directoryService.shutdown();
}
}
}

View File

@ -273,6 +273,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertFalse(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 2", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));
Assert.assertEquals(file1Id, json.getString("file_id"));
// Get document 2
json = target().path("/document/" + document2Id).request()
@ -285,6 +286,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertTrue(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 1", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));
Assert.assertEquals(file1Id, json.getString("file_id"));
// Create a tag
json = target().path("/tag").request()
@ -818,7 +820,7 @@ public class TestDocumentResource extends BaseJerseyTest {
@Test
public void testCustomMetadata() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Login metadata1
clientUtil.createUser("metadata1");

View File

@ -105,7 +105,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals("PIA00452.jpg", files.getJsonObject(0).getString("name"));
Assert.assertEquals("image/jpeg", files.getJsonObject(0).getString("mimetype"));
Assert.assertEquals(0, files.getJsonObject(0).getInt("version"));
Assert.assertEquals(163510L, files.getJsonObject(0).getJsonNumber("size").longValue());
Assert.assertEquals(FILE_PIA_00452_JPG_SIZE, files.getJsonObject(0).getJsonNumber("size").longValue());
Assert.assertEquals(file2Id, files.getJsonObject(1).getString("id"));
Assert.assertEquals("PIA00452.jpg", files.getJsonObject(1).getString("name"));
Assert.assertEquals(0, files.getJsonObject(1).getInt("version"));
@ -370,7 +370,7 @@ public class TestFileResource extends BaseJerseyTest {
.get();
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(163510, fileBytes.length);
Assert.assertEquals(FILE_PIA_00452_JPG_SIZE, fileBytes.length);
// Create another document
String document2Id = clientUtil.createDocument(fileOrphanToken);
@ -415,28 +415,19 @@ public class TestFileResource extends BaseJerseyTest {
String file1Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
// Check current quota
JsonObject json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(292641L, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE, getUserQuota(fileQuotaToken));
// Add a file (292641 bytes large)
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
// Check current quota
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE * 2, getUserQuota(fileQuotaToken));
// Add a file (292641 bytes large)
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
// Check current quota
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(877923L, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE * 3, getUserQuota(fileQuotaToken));
// Add a file (292641 bytes large)
try {
@ -446,16 +437,13 @@ public class TestFileResource extends BaseJerseyTest {
}
// Deletes a file
json = target().path("/file/" + file1Id).request()
JsonObject json = target().path("/file/" + file1Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.delete(JsonObject.class);
Assert.assertEquals("ok", json.getString("status"));
// Check current quota
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE * 2, getUserQuota(fileQuotaToken));
// Create a document
long create1Date = new Date().getTime();
@ -472,10 +460,7 @@ public class TestFileResource extends BaseJerseyTest {
clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileQuotaToken, document1Id);
// Check current quota
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(748792, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE * 2 + FILE_PIA_00452_JPG_SIZE, getUserQuota(fileQuotaToken));
// Deletes the document
json = target().path("/document/" + document1Id).request()
@ -484,9 +469,12 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals("ok", json.getString("status"));
// Check current quota
json = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.get(JsonObject.class);
Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue());
Assert.assertEquals(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG_SIZE * 2, getUserQuota(fileQuotaToken));
}
private long getUserQuota(String userToken) {
return target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, userToken)
.get(JsonObject.class).getJsonNumber("storage_current").longValue();
}
}

View File

@ -25,7 +25,7 @@ public class TestGroupResource extends BaseJerseyTest {
@Test
public void testGroupResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Create group hierarchy
clientUtil.createGroup("g1");
@ -189,4 +189,4 @@ public class TestGroupResource extends BaseJerseyTest {
Assert.assertTrue(groupList.contains("g11"));
Assert.assertTrue(groupList.contains("g112"));
}
}
}

View File

@ -22,7 +22,7 @@ public class TestMetadataResource extends BaseJerseyTest {
@Test
public void testMetadataResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Get all metadata with admin
JsonObject json = target().path("/metadata")
@ -79,4 +79,4 @@ public class TestMetadataResource extends BaseJerseyTest {
metadata = json.getJsonArray("metadata");
Assert.assertEquals(0, metadata.size());
}
}
}

View File

@ -22,7 +22,7 @@ public class TestRouteModelResource extends BaseJerseyTest {
@Test
public void testRouteModelResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Login routeModel1
clientUtil.createUser("routeModel1");
@ -138,4 +138,4 @@ public class TestRouteModelResource extends BaseJerseyTest {
routeModels = json.getJsonArray("routemodels");
Assert.assertEquals(1, routeModels.size());
}
}
}

View File

@ -27,7 +27,7 @@ public class TestRouteResource extends BaseJerseyTest {
String route1Token = clientUtil.login("route1");
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Change SMTP configuration to target Wiser
target().path("/app/config_smtp").request()
@ -364,7 +364,7 @@ public class TestRouteResource extends BaseJerseyTest {
@Test
public void testTagActions() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Create an Approved tag
JsonObject json = target().path("/tag").request()
@ -511,4 +511,4 @@ public class TestRouteResource extends BaseJerseyTest {
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
.delete(JsonObject.class);
}
}
}

View File

@ -27,7 +27,7 @@ public class TestThemeResource extends BaseJerseyTest {
@Test
public void testThemeResource() throws Exception {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Get the stylesheet anonymously
String stylesheet = target().path("/theme/stylesheet").request()
@ -104,4 +104,4 @@ public class TestThemeResource extends BaseJerseyTest {
response = target().path("/theme/image/background").request().get();
Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
}
}
}

View File

@ -37,7 +37,7 @@ public class TestUserResource extends BaseJerseyTest {
clientUtil.createUser("alice");
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// List all users
json = target().path("/user/list")
@ -75,7 +75,7 @@ public class TestUserResource extends BaseJerseyTest {
response = target().path("/user").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminToken)
.put(Entity.form(new Form()
.param("username", "bob-")
.param("username", "bob/")
.param("email", "bob@docs.com")
.param("password", "12345678")
.param("storage_quota", "10")));
@ -250,7 +250,7 @@ public class TestUserResource extends BaseJerseyTest {
clientUtil.createUser("admin_user1");
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Check admin information
JsonObject json = target().path("/user").request()
@ -336,7 +336,7 @@ public class TestUserResource extends BaseJerseyTest {
@Test
public void testTotp() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Create totp1 user
clientUtil.createUser("totp1");
@ -425,7 +425,7 @@ public class TestUserResource extends BaseJerseyTest {
@Test
public void testResetPassword() throws Exception {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Change SMTP configuration to target Wiser
target().path("/app/config_smtp").request()
@ -493,4 +493,4 @@ public class TestUserResource extends BaseJerseyTest {
json = response.readEntity(JsonObject.class);
Assert.assertEquals("KeyNotFound", json.getString("type"));
}
}
}

View File

@ -26,7 +26,7 @@ public class TestVocabularyResource extends BaseJerseyTest {
String vocabulary1Token = clientUtil.login("vocabulary1");
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Get coverage vocabularies entries
JsonObject json = target().path("/vocabulary/coverage").request()
@ -109,4 +109,4 @@ public class TestVocabularyResource extends BaseJerseyTest {
.get(JsonObject.class);
Assert.assertEquals(0, json.getJsonArray("entries").size());
}
}
}

View File

@ -24,7 +24,7 @@ public class TestWebhookResource extends BaseJerseyTest {
@Test
public void testWebhookResource() {
// Login admin
String adminToken = clientUtil.login("admin", "admin", false);
String adminToken = adminToken();
// Login webhook1
clientUtil.createUser("webhook1");
@ -85,4 +85,4 @@ public class TestWebhookResource extends BaseJerseyTest {
webhooks = json.getJsonArray("webhooks");
Assert.assertEquals(0, webhooks.size());
}
}
}

View File

@ -0,0 +1,492 @@
package com.sismics.docs.rest.util;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.rest.BaseTransactionalTest;
import com.sismics.util.mime.MimeType;
import org.apache.poi.ss.formula.functions.T;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Unit tests for the mapping of HTTP search parameters onto a {@link DocumentCriteria}.
 * <p>
 * Each test exercises exactly one parameter of
 * {@code DocumentSearchCriteriaUtil.addHttpSearchParams} (argument order after the
 * criteria: by, createdAfter, createdBefore, full, lang, mime, shared, simple,
 * tag, notTag, title, updatedAfter, updatedBefore, workflow, tagDtoList)
 * and checks the resulting criteria field.
 */
public class TestDocumentSearchCriteriaUtil extends BaseTransactionalTest {

    /**
     * Builds and persists a tag owned by the given user.
     *
     * @param tagDao tag DAO used for persistence
     * @param owner owner of the tag
     * @param name tag name
     * @param parentId optional parent tag id (null for a root tag)
     * @return the persisted tag
     */
    private static Tag createTestTag(TagDao tagDao, User owner, String name, String parentId) {
        Tag tag = new Tag();
        tag.setName(name);
        tag.setColor("#bbb");
        tag.setUserId(owner.getId());
        if (parentId != null) {
            tag.setParentId(parentId);
        }
        tagDao.create(tag, owner.getId());
        return tag;
    }

    @Test
    public void testHttpParamsBy() throws Exception {
        User user = createUser("user1");

        // An existing username is resolved to that user's id
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                "user1", null, null, null, null, null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals(user.getId(), criteria.getCreatorId());

        // An unknown username still produces a creator id (so the search matches nothing)
        criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                "missing", null, null, null, null, null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertNotNull(criteria.getCreatorId());
    }

    @Test
    public void testHttpParamsCreatedAfter() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, "2022-03-27", null, null, null, null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals(new DateTime(2022, 3, 27, 0, 0, 0).toDate(), criteria.getCreateDateMin());
    }

    @Test
    public void testHttpParamsCreatedBefore() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, "2022-03-27", null, null, null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals(new DateTime(2022, 3, 27, 0, 0, 0).toDate(), criteria.getCreateDateMax());
    }

    @Test
    public void testHttpParamsFull() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, "full", null, null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals("full", criteria.getFullSearch());
    }

    @Test
    public void testHttpParamsLang() {
        // A known ISO 639-2 code is kept as-is
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, "fra", null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals("fra", criteria.getLanguage());

        // An unknown code is replaced by some sentinel value, never passed through
        criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, "unknown", null, null, null,
                null, null, null, null, null, null, null);
        Assert.assertNotNull(criteria.getLanguage());
        Assert.assertNotEquals("unknown", criteria.getLanguage());
    }

    @Test
    public void testHttpParamsMime() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, MimeType.IMAGE_GIF, null, null,
                null, null, null, null, null, null, null);
        Assert.assertEquals(MimeType.IMAGE_GIF, criteria.getMimeType());
    }

    @Test
    public void testHttpParamsShared() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, true, null,
                null, null, null, null, null, null, null);
        Assert.assertTrue(criteria.getShared());
    }

    @Test
    public void testHttpParamsSimple() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, "simple",
                null, null, null, null, null, null, null);
        Assert.assertEquals("simple", criteria.getSimpleSearch());
    }

    @Test
    public void testHttpParamsTag() throws Exception {
        TagDao tagDao = new TagDao();
        User user = createUser("user1");
        Tag tag1 = createTestTag(tagDao, user, "tag1", null);
        Tag tag2 = createTestTag(tagDao, user, "tag2", null);
        Tag tag3 = createTestTag(tagDao, user, "tag3", tag2.getId());

        List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria(), null);

        // Searching a leaf tag selects only that tag
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                "tag1", null, null, null, null, null, allTagDtoList);
        Assert.assertEquals(List.of(Collections.singletonList(tag1.getId())), criteria.getTagIdList());

        // Searching a parent tag also selects its children
        criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                "tag2", null, null, null, null, null, allTagDtoList);
        Assert.assertEquals(List.of(List.of(tag2.getId(), tag3.getId())), criteria.getTagIdList());
    }

    @Test
    public void testHttpParamsNotTag() throws Exception {
        TagDao tagDao = new TagDao();
        User user = createUser("user1");
        Tag tag1 = createTestTag(tagDao, user, "tag1", null);
        Tag tag2 = createTestTag(tagDao, user, "tag2", null);
        Tag tag3 = createTestTag(tagDao, user, "tag3", tag2.getId());

        List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria(), null);

        // Excluding a leaf tag excludes only that tag
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, "tag1", null, null, null, null, allTagDtoList);
        Assert.assertEquals(List.of(Collections.singletonList(tag1.getId())), criteria.getExcludedTagIdList());

        // Excluding a parent tag also excludes its children
        criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, "tag2", null, null, null, null, allTagDtoList);
        Assert.assertEquals(List.of(List.of(tag2.getId(), tag3.getId())), criteria.getExcludedTagIdList());
    }

    @Test
    public void testHttpParamsTitle() {
        // A comma-separated title parameter is split into a list
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, null, "title1,title2", null, null, null, null);
        Assert.assertEquals(Arrays.asList("title1", "title2"), criteria.getTitleList());
    }

    @Test
    public void testHttpParamsUpdatedAfter() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, null, null, "2022-03-27", null, null, null);
        Assert.assertEquals(new DateTime(2022, 3, 27, 0, 0, 0).toDate(), criteria.getUpdateDateMin());
    }

    @Test
    public void testHttpParamsUpdatedBefore() {
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, null, null, null, "2022-03-27", null, null);
        Assert.assertEquals(new DateTime(2022, 3, 27, 0, 0, 0).toDate(), criteria.getUpdateDateMax());
    }

    @Test
    public void testHttpParamsWorkflow() {
        // workflow=me activates the active-route filter
        DocumentCriteria criteria = new DocumentCriteria();
        DocumentSearchCriteriaUtil.addHttpSearchParams(criteria,
                null, null, null, null, null, null, null, null,
                null, null, null, null, null, "me", null);
        Assert.assertTrue(criteria.getActiveRoute());
    }
}

95
pom.xml
View File

@ -32,10 +32,10 @@
<at.favre.lib.bcrypt.version>0.10.2</at.favre.lib.bcrypt.version>
<org.apache.lucene.version>8.7.0</org.apache.lucene.version>
<org.imgscalr.imgscalr-lib.version>4.2</org.imgscalr.imgscalr-lib.version>
<org.apache.pdfbox.pdfbox.version>2.0.27</org.apache.pdfbox.pdfbox.version>
<org.apache.pdfbox.pdfbox.version>2.0.29</org.apache.pdfbox.pdfbox.version>
<org.bouncycastle.bcprov-jdk15on.version>1.70</org.bouncycastle.bcprov-jdk15on.version>
<joda-time.joda-time.version>2.12.2</joda-time.joda-time.version>
<org.hibernate.hibernate.version>5.6.15.Final</org.hibernate.hibernate.version>
<org.hibernate.hibernate.version>6.3.1.Final</org.hibernate.hibernate.version>
<fr.opensagres.xdocreport.version>2.0.4</fr.opensagres.xdocreport.version>
<net.java.dev.jna.jna.version>5.13.0</net.java.dev.jna.jna.version>
<com.twelvemonkeys.imageio.version>3.9.4</com.twelvemonkeys.imageio.version>
@ -46,14 +46,15 @@
<com.icegreen.greenmail.version>1.6.14</com.icegreen.greenmail.version>
<org.jsoup.jsoup.version>1.15.4</org.jsoup.jsoup.version>
<com.squareup.okhttp3.okhttp.version>4.10.0</com.squareup.okhttp3.okhttp.version>
<org.apache.directory.api.api-all.version>2.1.3</org.apache.directory.api.api-all.version>
<org.apache.directory.api.version>2.1.3</org.apache.directory.api.version>
<org.apache.directory.server.apacheds-all.version>2.0.0.AM27</org.apache.directory.server.apacheds-all.version>
<org.glassfish.jersey.version>3.0.10</org.glassfish.jersey.version>
<jakarta.servlet.jakarta.servlet-api.version>5.0.0</jakarta.servlet.jakarta.servlet-api.version>
<org.eclipse.jetty.jetty-server.version>11.0.14</org.eclipse.jetty.jetty-server.version>
<org.eclipse.jetty.jetty-webapp.version>11.0.14</org.eclipse.jetty.jetty-webapp.version>
<org.eclipse.jetty.jetty-servlet.version>11.0.14</org.eclipse.jetty.jetty-servlet.version>
<!-- Plugins version -->
<org.apache.maven.plugins.maven-antrun-plugin.version>3.1.0</org.apache.maven.plugins.maven-antrun-plugin.version>
<org.apache.maven.plugins.maven-jar-plugin.version>3.3.0</org.apache.maven.plugins.maven-jar-plugin.version>
@ -61,7 +62,7 @@
<org.apache.maven.plugins.maven-surefire-plugin.version>3.0.0</org.apache.maven.plugins.maven-surefire-plugin.version>
<org.eclipse.jetty.jetty-maven-plugin.version>11.0.14</org.eclipse.jetty.jetty-maven-plugin.version>
</properties>
<scm>
<connection>scm:git:https://github.com/sismics/docs.git</connection>
<developerConnection>scm:git:https://github.com/docs/docs.git</developerConnection>
@ -93,7 +94,7 @@
<artifactId>maven-war-plugin</artifactId>
<version>${org.apache.maven.plugins.maven-war-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
@ -109,7 +110,7 @@
<reuseForks>false</reuseForks>
</configuration>
</plugin>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
@ -117,13 +118,13 @@
</plugin>
</plugins>
</build>
<modules>
<module>docs-core</module>
<module>docs-web-common</module>
<module>docs-web</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
@ -131,38 +132,38 @@
<artifactId>docs-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
<type>test-jar</type>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
<version>${org.eclipse.jetty.jetty-server.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
<version>${org.eclipse.jetty.jetty-webapp.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
@ -180,7 +181,7 @@
<artifactId>commons-compress</artifactId>
<version>${org.apache.commons.commons-compress.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
@ -198,7 +199,7 @@
<artifactId>commons-email</artifactId>
<version>${org.apache.commons.commons-email.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@ -222,19 +223,19 @@
<artifactId>log4j</artifactId>
<version>${log4j.log4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${org.slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
@ -264,7 +265,7 @@
<artifactId>jersey-container-servlet</artifactId>
<version>${org.glassfish.jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-processing</artifactId>
@ -276,7 +277,7 @@
<artifactId>jersey-media-multipart</artifactId>
<version>${org.glassfish.jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
@ -288,7 +289,7 @@
<artifactId>jersey-client</artifactId>
<version>${org.glassfish.jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.test-framework.providers</groupId>
<artifactId>jersey-test-framework-provider-bundle</artifactId>
@ -307,7 +308,7 @@
<artifactId>jersey-test-framework-provider-grizzly2</artifactId>
<version>${org.glassfish.jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-grizzly2-servlet</artifactId>
@ -327,11 +328,11 @@
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-jakarta</artifactId>
<groupId>org.hibernate.orm</groupId>
<artifactId>hibernate-core</artifactId>
<version>${org.hibernate.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
@ -349,25 +350,25 @@
<artifactId>lucene-core</artifactId>
<version>${org.apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
<version>${org.apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
<version>${org.apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
<version>${org.apache.lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-suggest</artifactId>
@ -385,25 +386,25 @@
<artifactId>imgscalr-lib</artifactId>
<version>${org.imgscalr.imgscalr-lib.version}</version>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
<version>${org.apache.pdfbox.pdfbox.version}</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>${org.bouncycastle.bcprov-jdk15on.version}</version>
</dependency>
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>fr.opensagres.odfdom.converter.pdf</artifactId>
<version>${fr.opensagres.xdocreport.version}</version>
</dependency>
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>fr.opensagres.poi.xwpf.converter.pdf</artifactId>
@ -436,8 +437,26 @@
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-all</artifactId>
<version>${org.apache.directory.api.api-all.version}</version>
<artifactId>api-ldap-client-api</artifactId>
<version>${org.apache.directory.api.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-schema-data</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-standalone</artifactId>
<version>${org.apache.directory.api.version}</version>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-all</artifactId>
<version>${org.apache.directory.server.apacheds-all.version}</version>
</dependency>
<!-- ImageIO plugins -->
@ -471,5 +490,5 @@
</dependencies>
</dependencyManagement>
</project>