Refactor documents and files indexing

This commit is contained in:
Benjamin Gamard 2018-03-29 17:59:47 +02:00
parent 899f13cb35
commit d1a8fa38b0
96 changed files with 854 additions and 950 deletions

View File

@ -25,16 +25,6 @@ public class Constants {
*/
public static final String DEFAULT_ADMIN_EMAIL = "admin@localhost";
/**
* RAM Lucene directory storage.
*/
public static final String LUCENE_DIRECTORY_STORAGE_RAM = "RAM";
/**
* File Lucene directory storage.
*/
public static final String LUCENE_DIRECTORY_STORAGE_FILE = "FILE";
/**
* Guest user ID.
*/

View File

@ -1,10 +1,10 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.dto.AclDto;
import com.sismics.docs.core.dao.dto.AclDto;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,20 +1,10 @@
package com.sismics.docs.core.dao.jpa;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.persistence.EntityManager;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.AuditLogCriteria;
import com.sismics.docs.core.dao.jpa.dto.AuditLogDto;
import com.sismics.docs.core.dao.criteria.AuditLogCriteria;
import com.sismics.docs.core.dao.dto.AuditLogDto;
import com.sismics.docs.core.model.jpa.AuditLog;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
@ -22,6 +12,10 @@ import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import java.sql.Timestamp;
import java.util.*;
/**
* Audit log DAO.
*

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.AuthenticationToken;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,21 +1,20 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.dto.CommentDto;
import com.sismics.docs.core.model.jpa.Comment;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.dto.CommentDto;
import com.sismics.docs.core.model.jpa.Comment;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.util.context.ThreadLocalContext;
/**
* Comment DAO.
*

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.model.jpa.Config;

View File

@ -1,15 +1,14 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import com.sismics.docs.core.dao.dto.ContributorDto;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import com.sismics.docs.core.dao.jpa.dto.ContributorDto;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.util.context.ThreadLocalContext;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* Contributor DAO.

View File

@ -1,26 +1,19 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.sql.Timestamp;
import java.util.*;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* Document DAO.
@ -191,130 +184,6 @@ public class DocumentDao {
}
}
/**
* Searches documents by criteria.
*
* @param paginatedList List of documents (updated by side effects)
* @param criteria Search criteria
* @param sortCriteria Sort criteria
* @throws Exception e
*/
public void findByCriteria(PaginatedList<DocumentDto> paginatedList, DocumentCriteria criteria, SortCriteria sortCriteria) throws Exception {
    Map<String, Object> parameterMap = new HashMap<>();
    List<String> criteriaList = new ArrayList<>();
    // Base projection: one row per document with its share count (c5), file count (c6)
    // and, if any, the current route step assigned to one of the caller's targets (c7).
    StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, ");
    sb.append(" s.count c5, ");
    sb.append(" f.count c6, ");
    sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
    sb.append(" from T_DOCUMENT d ");
    // Subquery s: number of active shares per document (share ACLs not deleted).
    // Subquery f: number of non-deleted files per document.
    sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
            " FROM T_SHARE s, T_ACL ac " +
            " WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
            " s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
            " left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
            " FROM T_FILE f " +
            " WHERE f.FIL_DELETEDATE_D IS NULL group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
    // Subquery rs2: for each document's active route, the unfinished step with the
    // lowest order number, kept only when it is assigned to one of :targetIdList.
    sb.append(" left join (select rs.*, rs3.idDocument " +
            "from T_ROUTE_STEP rs " +
            "join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
            "where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
    // Add search criteria
    if (criteria.getTargetIdList() != null) {
        // Read permission is enough for searching
        sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
        sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
        sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
        // Visible when READ is granted on the document itself (a) or on one of its tags (a2).
        criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
        parameterMap.put("targetIdList", criteria.getTargetIdList());
    }
    if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
        // Delegate full-text matching to Lucene, then restrict the SQL query to the IDs it returned.
        LuceneDao luceneDao = new LuceneDao();
        Set<String> documentIdList = luceneDao.search(criteria.getSearch(), criteria.getFullSearch());
        if (documentIdList.isEmpty()) {
            // If the search doesn't find any document, the request should return nothing:
            // insert a random UUID so the IN clause can never match a real row.
            documentIdList.add(UUID.randomUUID().toString());
        }
        criteriaList.add("d.DOC_ID_C in :documentIdList");
        parameterMap.put("documentIdList", documentIdList);
    }
    // Optional creation/update date range filters (inclusive bounds).
    if (criteria.getCreateDateMin() != null) {
        criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
        parameterMap.put("createDateMin", criteria.getCreateDateMin());
    }
    if (criteria.getCreateDateMax() != null) {
        criteriaList.add("d.DOC_CREATEDATE_D <= :createDateMax");
        parameterMap.put("createDateMax", criteria.getCreateDateMax());
    }
    if (criteria.getUpdateDateMin() != null) {
        criteriaList.add("d.DOC_UPDATEDATE_D >= :updateDateMin");
        parameterMap.put("updateDateMin", criteria.getUpdateDateMin());
    }
    if (criteria.getUpdateDateMax() != null) {
        criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
        parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
    }
    if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
        // One numbered join per requested tag; a document matches if it carries any of them (OR).
        int index = 0;
        List<String> tagCriteriaList = Lists.newArrayList();
        for (String tagId : criteria.getTagIdList()) {
            sb.append(String.format("left join T_DOCUMENT_TAG dt%d on dt%d.DOT_IDDOCUMENT_C = d.DOC_ID_C and dt%d.DOT_IDTAG_C = :tagId%d and dt%d.DOT_DELETEDATE_D is null ", index, index, index, index, index));
            parameterMap.put("tagId" + index, tagId);
            tagCriteriaList.add(String.format("dt%d.DOT_ID_C is not null", index));
            index++;
        }
        criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
    }
    if (criteria.getShared() != null && criteria.getShared()) {
        // Only documents with at least one active share.
        criteriaList.add("s.count > 0");
    }
    if (criteria.getLanguage() != null) {
        criteriaList.add("d.DOC_LANGUAGE_C = :language");
        parameterMap.put("language", criteria.getLanguage());
    }
    if (criteria.getCreatorId() != null) {
        criteriaList.add("d.DOC_IDUSER_C = :creatorId");
        parameterMap.put("creatorId", criteria.getCreatorId());
    }
    if (criteria.getActiveRoute() != null && criteria.getActiveRoute()) {
        // Only documents with a current route step (rs2 matched).
        criteriaList.add("rs2.RTP_ID_C is not null");
    }
    // Soft-deleted documents are always excluded.
    criteriaList.add("d.DOC_DELETEDATE_D is null");
    if (!criteriaList.isEmpty()) {
        sb.append(" where ");
        sb.append(Joiner.on(" and ").join(criteriaList));
    }
    // Perform the search
    QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
    List<Object[]> l = PaginatedLists.executePaginatedQuery(paginatedList, queryParam, sortCriteria);
    // Assemble results: column order must mirror the c0..c8 projection above.
    List<DocumentDto> documentDtoList = new ArrayList<>();
    for (Object[] o : l) {
        int i = 0;
        DocumentDto documentDto = new DocumentDto();
        documentDto.setId((String) o[i++]);
        documentDto.setTitle((String) o[i++]);
        documentDto.setDescription((String) o[i++]);
        documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
        documentDto.setLanguage((String) o[i++]);
        // Counts come back null when the left joins found no rows.
        Number shareCount = (Number) o[i++];
        documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
        Number fileCount = (Number) o[i++];
        documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
        documentDto.setActiveRoute(o[i++] != null);
        documentDto.setCurrentStepName((String) o[i++]);
        documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
        documentDtoList.add(documentDto);
    }
    paginatedList.setResultList(documentDtoList);
}
/**
* Update a document.
*

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.model.jpa.File;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.GroupCriteria;
import com.sismics.docs.core.dao.jpa.dto.GroupDto;
import com.sismics.docs.core.dao.criteria.GroupCriteria;
import com.sismics.docs.core.dao.dto.GroupDto;
import com.sismics.docs.core.model.jpa.Group;
import com.sismics.docs.core.model.jpa.UserGroup;
import com.sismics.docs.core.util.AuditLogUtil;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.model.jpa.PasswordRecovery;

View File

@ -1,17 +1,12 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import com.sismics.docs.core.dao.dto.RelationDto;
import com.sismics.docs.core.model.jpa.Relation;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import com.sismics.docs.core.dao.jpa.dto.RelationDto;
import com.sismics.docs.core.model.jpa.Relation;
import com.sismics.util.context.ThreadLocalContext;
import java.util.*;
/**
* Relation DAO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.collect.Sets;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.RouteCriteria;
import com.sismics.docs.core.dao.jpa.dto.RouteDto;
import com.sismics.docs.core.dao.criteria.RouteCriteria;
import com.sismics.docs.core.dao.dto.RouteDto;
import com.sismics.docs.core.model.jpa.Route;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.jpa.QueryParam;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.RouteModelCriteria;
import com.sismics.docs.core.dao.jpa.dto.RouteModelDto;
import com.sismics.docs.core.dao.criteria.RouteModelCriteria;
import com.sismics.docs.core.dao.dto.RouteModelDto;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.jpa.QueryParam;

View File

@ -1,11 +1,11 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.RouteStepTransition;
import com.sismics.docs.core.constant.RouteStepType;
import com.sismics.docs.core.dao.jpa.criteria.RouteStepCriteria;
import com.sismics.docs.core.dao.jpa.dto.RouteStepDto;
import com.sismics.docs.core.dao.criteria.RouteStepCriteria;
import com.sismics.docs.core.dao.dto.RouteStepDto;
import com.sismics.docs.core.model.jpa.RouteStep;
import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.QueryUtil;

View File

@ -1,14 +1,13 @@
package com.sismics.docs.core.dao.jpa;
import java.util.Date;
import java.util.UUID;
import javax.persistence.EntityManager;
import javax.persistence.Query;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Share;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.Date;
import java.util.UUID;
/**
* Share DAO.
*

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.model.jpa.DocumentTag;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.AuditLogUtil;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.UserCriteria;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.EncryptionUtil;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa;
package com.sismics.docs.core.dao;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
import java.util.Date;
import java.util.List;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**
* Group criteria.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
import java.util.List;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.criteria;
package com.sismics.docs.core.dao.criteria;
/**
* User criteria.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.PermType;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.AuditLogType;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Comment DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Contributor DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Document DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Group DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Tag DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Route DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Route model DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.RouteStepType;
import com.sismics.util.JsonUtil;

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
/**
* Tag DTO.

View File

@ -1,4 +1,4 @@
package com.sismics.docs.core.dao.jpa.dto;
package com.sismics.docs.core.dao.dto;
import com.google.common.base.MoreObjects;

View File

@ -1,238 +0,0 @@
package com.sismics.docs.core.dao.lucene;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.LuceneUtil;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Lucene DAO.
*
* @author bgamard
*/
public class LuceneDao {
    /**
     * Destroy and rebuild index.
     *
     * @param documentList List of documents to index
     * @param fileList List of files to index
     */
    public void rebuildIndex(final List<Document> documentList, final List<File> fileList) {
        LuceneUtil.handle(indexWriter -> {
            // Empty index
            indexWriter.deleteAll();
            // Add all documents
            for (Document document : documentList) {
                org.apache.lucene.document.Document luceneDocument = getDocumentFromDocument(document);
                indexWriter.addDocument(luceneDocument);
            }
            // Add all files
            for (File file : fileList) {
                org.apache.lucene.document.Document luceneDocument = getDocumentFromFile(file);
                indexWriter.addDocument(luceneDocument);
            }
        });
    }

    /**
     * Add document to the index.
     *
     * @param document Document to add
     */
    public void createDocument(final Document document) {
        LuceneUtil.handle(indexWriter -> {
            org.apache.lucene.document.Document luceneDocument = getDocumentFromDocument(document);
            indexWriter.addDocument(luceneDocument);
        });
    }

    /**
     * Add file to the index.
     *
     * @param file File to add
     */
    public void createFile(final File file) {
        LuceneUtil.handle(indexWriter -> {
            org.apache.lucene.document.Document luceneDocument = getDocumentFromFile(file);
            indexWriter.addDocument(luceneDocument);
        });
    }

    /**
     * Update file index.
     *
     * @param file Updated file
     */
    public void updateFile(final File file) {
        LuceneUtil.handle(indexWriter -> {
            org.apache.lucene.document.Document luceneDocument = getDocumentFromFile(file);
            // Replace the previous Lucene document keyed on the same "id" term
            indexWriter.updateDocument(new Term("id", file.getId()), luceneDocument);
        });
    }

    /**
     * Update document index.
     *
     * @param document Updated document
     */
    public void updateDocument(final Document document) {
        LuceneUtil.handle(indexWriter -> {
            org.apache.lucene.document.Document luceneDocument = getDocumentFromDocument(document);
            indexWriter.updateDocument(new Term("id", document.getId()), luceneDocument);
        });
    }

    /**
     * Delete document from the index.
     * Both documents and files are indexed under the "id" field, so this
     * deletes either kind of entry.
     *
     * @param id Document ID to delete
     */
    public void deleteDocument(final String id) {
        LuceneUtil.handle(indexWriter -> indexWriter.deleteDocuments(new Term("id", id)));
    }

    /**
     * Search files.
     *
     * @param searchQuery Search query on title and description
     * @param fullSearchQuery Search query on all fields
     * @return List of document IDs
     * @throws Exception e
     */
    public Set<String> search(String searchQuery, String fullSearchQuery) throws Exception {
        // Escape query and add quotes so QueryParser generate a PhraseQuery.
        // Note: the metadata query also appends fullSearchQuery, so full-search
        // terms are matched against the metadata fields as well.
        searchQuery = "\"" + QueryParserUtil.escape(searchQuery + " " + fullSearchQuery) + "\"";
        fullSearchQuery = "\"" + QueryParserUtil.escape(fullSearchQuery) + "\"";

        // Build search query
        StandardQueryParser qpHelper = new StandardQueryParser(new StandardAnalyzer());
        qpHelper.setPhraseSlop(100); // PhraseQuery add terms: allow up to 100 positions between phrase terms

        // Search on documents and files: any metadata field OR the extracted file content may match
        BooleanQuery query = new BooleanQuery.Builder()
                .add(qpHelper.parse(searchQuery, "title"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "description"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "subject"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "identifier"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "publisher"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "format"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "source"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "type"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "coverage"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "rights"), Occur.SHOULD)
                .add(qpHelper.parse(searchQuery, "filename"), Occur.SHOULD)
                .add(qpHelper.parse(fullSearchQuery, "content"), Occur.SHOULD)
                .build();

        // Search
        DirectoryReader directoryReader = AppContext.getInstance().getIndexingService().getDirectoryReader();
        Set<String> documentIdList = new HashSet<>();
        if (directoryReader == null) {
            // The directory reader is not yet initialized (probably because there is nothing indexed)
            return documentIdList;
        }
        IndexSearcher searcher = new IndexSearcher(directoryReader);
        TopDocs topDocs = searcher.search(query, Integer.MAX_VALUE);
        ScoreDoc[] docs = topDocs.scoreDocs;

        // Extract document IDs: a file hit is resolved to its owning document
        for (ScoreDoc doc : docs) {
            org.apache.lucene.document.Document document = searcher.doc(doc.doc);
            String type = document.get("doctype");
            String documentId = null;
            if (type.equals("document")) {
                documentId = document.get("id");
            } else if (type.equals("file")) {
                documentId = document.get("document_id");
            }
            if (documentId != null) {
                // null happens for orphan files (no document_id stored)
                documentIdList.add(documentId);
            }
        }

        return documentIdList;
    }

    /**
     * Build Lucene document from database document.
     * Only "id" and "doctype" are stored; text fields are indexed but not stored.
     *
     * @param document Document
     * @return Document
     */
    private org.apache.lucene.document.Document getDocumentFromDocument(Document document) {
        org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
        luceneDocument.add(new StringField("id", document.getId(), Field.Store.YES));
        luceneDocument.add(new StringField("doctype", "document", Field.Store.YES));
        luceneDocument.add(new TextField("title", document.getTitle(), Field.Store.NO));
        // Optional metadata fields are only indexed when present
        if (document.getDescription() != null) {
            luceneDocument.add(new TextField("description", document.getDescription(), Field.Store.NO));
        }
        if (document.getSubject() != null) {
            luceneDocument.add(new TextField("subject", document.getSubject(), Field.Store.NO));
        }
        if (document.getIdentifier() != null) {
            luceneDocument.add(new TextField("identifier", document.getIdentifier(), Field.Store.NO));
        }
        if (document.getPublisher() != null) {
            luceneDocument.add(new TextField("publisher", document.getPublisher(), Field.Store.NO));
        }
        if (document.getFormat() != null) {
            luceneDocument.add(new TextField("format", document.getFormat(), Field.Store.NO));
        }
        if (document.getSource() != null) {
            luceneDocument.add(new TextField("source", document.getSource(), Field.Store.NO));
        }
        if (document.getType() != null) {
            luceneDocument.add(new TextField("type", document.getType(), Field.Store.NO));
        }
        if (document.getCoverage() != null) {
            luceneDocument.add(new TextField("coverage", document.getCoverage(), Field.Store.NO));
        }
        if (document.getRights() != null) {
            luceneDocument.add(new TextField("rights", document.getRights(), Field.Store.NO));
        }
        return luceneDocument;
    }

    /**
     * Build Lucene document from file.
     * "document_id" is stored so a file hit can be mapped back to its document in search().
     *
     * @param file File
     * @return Document
     */
    private org.apache.lucene.document.Document getDocumentFromFile(File file) {
        org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
        luceneDocument.add(new StringField("id", file.getId(), Field.Store.YES));
        luceneDocument.add(new StringField("doctype", "file", Field.Store.YES));
        if (file.getName() != null) {
            luceneDocument.add(new TextField("filename", file.getName(), Field.Store.NO));
        }
        if (file.getDocumentId() != null) {
            luceneDocument.add(new StringField("document_id", file.getDocumentId(), Field.Store.YES));
        }
        if (file.getContent() != null) {
            // Extracted text content; searched via the full-search query only
            luceneDocument.add(new TextField("content", file.getContent(), Field.Store.NO));
        }
        return luceneDocument;
    }
}

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.PasswordRecovery;
/**

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.Document;
/**

View File

@ -1,9 +1,9 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
@ -41,7 +41,6 @@ public class DocumentCreatedAsyncListener {
});
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.createDocument(event.getDocument());
AppContext.getInstance().getIndexingHandler().createDocument(event.getDocument());
}
}

View File

@ -1,12 +1,11 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.event.DocumentDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.event.DocumentDeletedAsyncEvent;
/**
* Listener on document deleted.
*
@ -31,7 +30,6 @@ public class DocumentDeletedAsyncListener {
}
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.deleteDocument(documentDeletedAsyncEvent.getDocumentId());
AppContext.getInstance().getIndexingHandler().deleteDocument(documentDeletedAsyncEvent.getDocumentId());
}
}

View File

@ -1,10 +1,10 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.TransactionUtil;
@ -38,13 +38,12 @@ public class DocumentUpdatedAsyncListener {
TransactionUtil.handle(() -> {
// Update Lucene index
DocumentDao documentDao = new DocumentDao();
LuceneDao luceneDao = new LuceneDao();
Document document = documentDao.getById(event.getDocumentId());
if (document == null) {
// Document deleted since event fired
return;
}
luceneDao.updateDocument(document);
AppContext.getInstance().getIndexingHandler().updateDocument(document);
// Update contributors list
ContributorDao contributorDao = new ContributorDao();

View File

@ -1,8 +1,8 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.FileUtil;
import org.slf4j.Logger;
@ -36,7 +36,6 @@ public class FileDeletedAsyncListener {
FileUtil.delete(file);
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.deleteDocument(file.getId());
AppContext.getInstance().getIndexingHandler().deleteDocument(file.getId());
}
}

View File

@ -1,12 +1,12 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.FileCreatedAsyncEvent;
import com.sismics.docs.core.event.FileEvent;
import com.sismics.docs.core.event.FileUpdatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.DirectoryUtil;
@ -54,8 +54,7 @@ public class FileProcessingAsyncListener {
processFile(event);
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.createFile(event.getFile());
AppContext.getInstance().getIndexingHandler().createFile(event.getFile());
FileUtil.endProcessingFile(event.getFile().getId());
}
@ -74,8 +73,7 @@ public class FileProcessingAsyncListener {
processFile(event);
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.updateFile(event.getFile());
AppContext.getInstance().getIndexingHandler().updateFile(event.getFile());
FileUtil.endProcessingFile(event.getFile().getId());
}

View File

@ -2,7 +2,7 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.event.PasswordLostEvent;
import com.sismics.docs.core.model.jpa.PasswordRecovery;
import com.sismics.docs.core.util.TransactionUtil;

View File

@ -1,10 +1,10 @@
package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.TransactionUtil;
@ -46,8 +46,7 @@ public class RebuildIndexAsyncListener {
List<File> fileList = fileDao.findAll();
// Rebuild index
LuceneDao luceneDao = new LuceneDao();
luceneDao.rebuildIndex(documentList, fileList);
AppContext.getInstance().getIndexingHandler().rebuildIndex(documentList, fileList);
});
}
}

View File

@ -2,7 +2,7 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.event.RouteStepValidateEvent;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.util.EmailUtil;

View File

@ -1,28 +0,0 @@
package com.sismics.docs.core.listener.sync;
import com.google.common.eventbus.DeadEvent;
import com.google.common.eventbus.Subscribe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Catch-all listener logging events that reached the bus with no subscriber.
 *
 * @author jtremeaux
 */
public class DeadEventListener {
    /**
     * Logger.
     */
    private static final Logger logger = LoggerFactory.getLogger(DeadEventListener.class);

    /**
     * Logs every dead event so unrouted messages are visible in the logs.
     *
     * @param deadEvent Catchall event
     */
    @Subscribe
    public void onDeadEvent(DeadEvent deadEvent) {
        logger.error("Dead event catched: " + deadEvent);
    }
}

View File

@ -2,17 +2,14 @@ package com.sismics.docs.core.model.context;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.ConfigDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.listener.sync.DeadEventListener;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.InboxService;
import com.sismics.docs.core.service.IndexingService;
import com.sismics.docs.core.util.PdfUtil;
import com.sismics.docs.core.util.indexing.IndexingHandler;
import com.sismics.docs.core.util.indexing.LuceneIndexingHandler;
import com.sismics.util.EnvironmentUtil;
import java.util.ArrayList;
@ -33,11 +30,6 @@ public class AppContext {
*/
private static AppContext instance;
/**
* Event bus.
*/
private EventBus eventBus;
/**
* Generic asynchronous event bus.
*/
@ -49,9 +41,9 @@ public class AppContext {
private EventBus mailEventBus;
/**
* Indexing service.
* Indexing handler.
*/
private IndexingService indexingService;
private IndexingHandler indexingHandler;
/**
* Inbox scanning service.
@ -69,17 +61,19 @@ public class AppContext {
private AppContext() {
resetEventBus();
// Start indexing service
ConfigDao configDao = new ConfigDao();
Config luceneStorageConfig = configDao.getById(ConfigType.LUCENE_DIRECTORY_STORAGE);
indexingService = new IndexingService(luceneStorageConfig != null ? luceneStorageConfig.getValue() : null);
indexingService.startAsync();
indexingService.awaitRunning();
// Start indexing handler
indexingHandler = new LuceneIndexingHandler();
try {
indexingHandler.startUp();
} catch (Exception e) {
// Blocking error, the app will not start
throw new RuntimeException(e);
}
// Start inbox service
inboxService = new InboxService();
inboxService.startAsync();
indexingService.awaitRunning();
inboxService.awaitRunning();
// Register fonts
PdfUtil.registerFonts();
@ -111,9 +105,6 @@ public class AppContext {
* (Re)-initializes the event buses.
*/
private void resetEventBus() {
eventBus = new EventBus();
eventBus.register(new DeadEventListener());
asyncExecutorList = new ArrayList<>();
asyncEventBus = newAsyncEventBus();
@ -142,29 +133,6 @@ public class AppContext {
return instance;
}
/**
* Wait for termination of all asynchronous events.
* /!\ Must be used only in unit tests and never a multi-user environment.
*/
public void waitForAsync() {
if (EnvironmentUtil.isUnitTest()) {
return;
}
try {
for (ExecutorService executor : asyncExecutorList) {
// Shutdown executor, don't accept any more tasks (can cause error with nested events)
try {
executor.shutdown();
executor.awaitTermination(60, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// NOP
}
}
} finally {
resetEventBus();
}
}
/**
* Creates a new asynchronous event bus.
*
@ -177,16 +145,12 @@ public class AppContext {
// /!\ Don't add more threads because a cleanup event is fired at the end of each request
ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>());
new LinkedBlockingQueue<>());
asyncExecutorList.add(executor);
return new AsyncEventBus(executor);
}
}
public EventBus getEventBus() {
return eventBus;
}
public EventBus getAsyncEventBus() {
return asyncEventBus;
}
@ -195,11 +159,34 @@ public class AppContext {
return mailEventBus;
}
public IndexingService getIndexingService() {
return indexingService;
public IndexingHandler getIndexingHandler() {
return indexingHandler;
}
public InboxService getInboxService() {
return inboxService;
}
public void shutDown() {
for (ExecutorService executor : asyncExecutorList) {
// Shutdown executor, don't accept any more tasks (can cause error with nested events)
try {
executor.shutdown();
executor.awaitTermination(60, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// NOP
}
}
if (indexingHandler != null) {
indexingHandler.shutDown();
}
if (inboxService != null) {
inboxService.stopAsync();
inboxService.awaitTerminated();
}
instance = null;
}
}

View File

@ -3,7 +3,7 @@ package com.sismics.docs.core.service;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;

View File

@ -1,167 +0,0 @@
package com.sismics.docs.core.service;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.CheckIndex.Status;
import org.apache.lucene.index.CheckIndex.Status.SegmentInfoStatus;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Path;
import java.util.concurrent.TimeUnit;
/**
 * Indexing service.
 * Owns the Lucene {@link Directory} (RAM or file based, per configuration) and a lazily
 * (re)opened {@link DirectoryReader}. Runs as a scheduled service but its periodic
 * iteration is a no-op; the service exists mainly for lifecycle (startUp/shutDown).
 *
 * @author bgamard
 */
public class IndexingService extends AbstractScheduledService {
    /**
     * Logger.
     */
    private static final Logger log = LoggerFactory.getLogger(IndexingService.class);
    /**
     * Lucene directory.
     */
    private Directory directory;
    /**
     * Directory reader.
     */
    private DirectoryReader directoryReader;
    /**
     * Lucene storage config ("RAM", "FILE", or null for the RAM default).
     */
    private String luceneStorageConfig;
    public IndexingService(String luceneStorageConfig) {
        this.luceneStorageConfig = luceneStorageConfig;
    }
    @Override
    protected void startUp() {
        // RAM directory storage by default
        if (luceneStorageConfig == null || luceneStorageConfig.equals(Constants.LUCENE_DIRECTORY_STORAGE_RAM)) {
            directory = new RAMDirectory();
            log.info("Using RAM Lucene storage");
        } else if (luceneStorageConfig.equals(Constants.LUCENE_DIRECTORY_STORAGE_FILE)) {
            Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
            log.info("Using file Lucene storage: {}", luceneDirectory);
            try {
                directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
            } catch (IOException e) {
                log.error("Error initializing Lucene index", e);
            }
        }
        // NOTE(review): if luceneStorageConfig is an unrecognized value, directory stays
        // null here and indexExists(directory) below throws — only the catch keeps startup alive.
        // Check index version and rebuild it if necessary
        try {
            if (DirectoryReader.indexExists(directory)) {
                log.info("Checking index health and version");
                try (CheckIndex checkIndex = new CheckIndex(directory)) {
                    Status status = checkIndex.checkIndex();
                    if (status.clean) {
                        // Healthy index: still rebuild if any segment was written by an older Lucene
                        for (SegmentInfoStatus segmentInfo : status.segmentInfos) {
                            if (!segmentInfo.version.onOrAfter(Version.LATEST)) {
                                log.info("Index is old (" + segmentInfo.version + "), rebuilding");
                                RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
                                AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
                                break;
                            }
                        }
                    } else {
                        // Corrupt index: trigger an asynchronous full rebuild
                        log.info("Index is dirty, rebuilding");
                        RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
                        AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
                    }
                }
            }
        } catch (Exception e) {
            log.error("Error checking index", e);
        }
    }
    @Override
    protected void shutDown() {
        // Close the reader before the directory it reads from.
        if (directoryReader != null) {
            try {
                directoryReader.close();
            } catch (IOException e) {
                log.error("Error closing the index reader", e);
            }
        }
        if (directory != null) {
            try {
                directory.close();
            } catch (IOException e) {
                log.error("Error closing Lucene index", e);
            }
        }
    }
    @Override
    protected void runOneIteration() {
        // Intentionally empty: the scheduled iteration only opens a transaction context.
        TransactionUtil.handle(() -> {
            // NOP
        });
    }
    @Override
    protected Scheduler scheduler() {
        return Scheduler.newFixedDelaySchedule(0, 1, TimeUnit.HOURS);
    }
    /**
     * Getter of directory.
     *
     * @return the directory
     */
    public Directory getDirectory() {
        return directory;
    }
    /**
     * Returns a valid directory reader.
     * Take care of reopening the reader if the index has changed
     * and closing the previous one.
     * Returns null when no index exists yet; may also return null on I/O error
     * during the first open (errors are logged, not thrown).
     *
     * @return the directoryReader
     */
    public DirectoryReader getDirectoryReader() {
        if (directoryReader == null) {
            try {
                if (!DirectoryReader.indexExists(directory)) {
                    return null;
                }
                directoryReader = DirectoryReader.open(directory);
            } catch (IOException e) {
                log.error("Error creating the directory reader", e);
            }
        } else {
            try {
                // openIfChanged returns null when the index is unchanged; otherwise
                // swap in the fresh reader and close the stale one.
                DirectoryReader newReader = DirectoryReader.openIfChanged(directoryReader);
                if (newReader != null) {
                    directoryReader.close();
                    directoryReader = newReader;
                }
            } catch (IOException e) {
                log.error("Error while reopening the directory reader", e);
            }
        }
        return directoryReader;
    }
}

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.constant.ActionType;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.util.action.Action;
import com.sismics.docs.core.util.action.AddTagAction;
import com.sismics.docs.core.util.action.RemoveTagAction;

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.AuditLogDao;
import com.sismics.docs.core.dao.AuditLogDao;
import com.sismics.docs.core.model.jpa.AuditLog;
import com.sismics.docs.core.model.jpa.Loggable;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.jpa.ConfigDao;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.model.jpa.Config;
import java.util.ResourceBundle;

View File

@ -2,8 +2,8 @@ package com.sismics.docs.core.util;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Document;

View File

@ -5,8 +5,8 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.event.FileCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.File;

View File

@ -1,85 +0,0 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.model.context.AppContext;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Lucene utils.
 * Wraps a unit of index-writing work in an {@link IndexWriter} whose lifecycle
 * (create, rollback on error, commit-on-close) is fully managed here.
 *
 * @author bgamard
 */
public class LuceneUtil {
    /**
     * Logger.
     */
    private static final Logger log = LoggerFactory.getLogger(LuceneUtil.class);

    /**
     * Encapsulate a process into a Lucene context.
     * Errors are logged, never thrown: on failure to create the writer the runnable
     * is skipped; on failure inside the runnable the transaction is rolled back.
     *
     * @param runnable Runnable
     */
    public static void handle(LuceneRunnable runnable) {
        // Standard analyzer
        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
        // Automatically commit when closing this writer
        config.setCommitOnClose(true);
        // Merge sequentially, because Lucene writing is already done asynchronously
        config.setMergeScheduler(new SerialMergeScheduler());

        // Creating index writer
        Directory directory = AppContext.getInstance().getIndexingService().getDirectory();
        IndexWriter indexWriter;
        try {
            indexWriter = new IndexWriter(directory, config);
        } catch (IOException e) {
            log.error("Cannot create IndexWriter", e);
            // Bug fix: previously the runnable was still invoked with a null writer,
            // guaranteeing an NPE inside the caller's code. Bail out instead.
            return;
        }

        try {
            runnable.run(indexWriter);
        } catch (Exception e) {
            log.error("Error in running index writing transaction", e);
            try {
                // rollback() discards pending changes and closes the writer
                indexWriter.rollback();
            } catch (IOException e1) {
                log.error("Cannot rollback index writing transaction", e1);
            }
            return;
        }

        try {
            // Commit happens here thanks to setCommitOnClose(true)
            indexWriter.close();
        } catch (IOException e) {
            log.error("Cannot commit and close IndexWriter", e);
        }
    }

    /**
     * Lucene runnable.
     *
     * @author bgamard
     */
    @FunctionalInterface
    public interface LuceneRunnable {
        /**
         * Code to run in a Lucene context.
         *
         * @param indexWriter Index writer
         * @throws Exception e
         */
        void run(IndexWriter indexWriter) throws Exception;
    }
}

View File

@ -6,7 +6,7 @@ import com.google.common.io.Closer;
import com.google.common.io.Resources;
import com.lowagie.text.FontFactory;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.format.FormatHandler;
import com.sismics.docs.core.util.format.FormatHandlerUtil;

View File

@ -4,12 +4,12 @@ import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.criteria.UserCriteria;
import com.sismics.docs.core.dao.jpa.dto.RouteStepDto;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.RouteStepDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.event.RouteStepValidateEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Acl;

View File

@ -1,8 +1,8 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.dao.jpa.GroupDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.GroupDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.Group;
import com.sismics.docs.core.model.jpa.User;

View File

@ -1,7 +1,7 @@
package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.dto.TagDto;
import java.util.List;

View File

@ -1,6 +1,6 @@
package com.sismics.docs.core.util.action;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.DocumentDto;
import javax.json.JsonObject;

View File

@ -1,10 +1,10 @@
package com.sismics.docs.core.util.action;
import com.google.common.collect.Sets;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import java.util.List;

View File

@ -1,10 +1,10 @@
package com.sismics.docs.core.util.action;
import com.google.common.collect.Sets;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import java.util.List;

View File

@ -1,12 +1,17 @@
package com.sismics.docs.core.util.action;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import javax.json.JsonObject;
import java.util.List;
/**
* Abstract class for tag actions.
*
* @author bgamard
*/
public abstract class TagAction implements Action {
@Override
public void validate(JsonObject action) throws Exception {

View File

@ -1,6 +1,6 @@
package com.sismics.docs.core.util.authentication;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.util.ClasspathScanner;

View File

@ -0,0 +1,82 @@
package com.sismics.docs.core.util.indexing;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.SortCriteria;
import java.util.List;
/**
 * Indexing handler.
 * Abstraction over the fulltext index backend: lifecycle, incremental updates
 * for documents and files, full rebuild, and criteria-based search.
 *
 * @author bgamard
 */
public interface IndexingHandler {
    /**
     * Start the indexing handler.
     * Called once at application startup, before any indexing request.
     *
     * @throws Exception e
     */
    void startUp() throws Exception;
    /**
     * Shutdown the indexing handler.
     * Called once at application shutdown; must release all index resources.
     */
    void shutDown();
    /**
     * Fully rebuild the index.
     * Implementations should replace the whole index content with the given entities.
     *
     * @param documentList All documents
     * @param fileList All files
     */
    void rebuildIndex(List<Document> documentList, List<File> fileList);
    /**
     * Index a new document.
     *
     * @param document Document
     */
    void createDocument(Document document);
    /**
     * Index a new file.
     *
     * @param file File
     */
    void createFile(File file);
    /**
     * Update an indexed document.
     *
     * @param document Document
     */
    void updateDocument(Document document);
    /**
     * Update an indexed file.
     *
     * @param file File
     */
    void updateFile(File file);
    /**
     * Delete a file or a document.
     * A single entry point for both entity types; the ID alone identifies the entry.
     *
     * @param id ID
     */
    void deleteDocument(String id);
    /**
     * Searches documents by criteria.
     *
     * @param paginatedList List of documents (updated by side effects)
     * @param criteria Search criteria
     * @param sortCriteria Sort criteria
     * @throws Exception e
     */
    void findByCriteria(PaginatedList<DocumentDto> paginatedList, DocumentCriteria criteria, SortCriteria sortCriteria) throws Exception;
}

View File

@ -0,0 +1,511 @@
package com.sismics.docs.core.util.indexing;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.SortCriteria;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Path;
import java.sql.Timestamp;
import java.util.*;
/**
* Lucene indexing handler.
* Documents and files are indexed in Lucene, but only used for fulltext search.
* Other search criteria are still using the database.
*
* @author bgamard
*/
public class LuceneIndexingHandler implements IndexingHandler {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(LuceneIndexingHandler.class);
/**
* Lucene directory.
*/
private Directory directory;
/**
* Directory reader.
*/
private DirectoryReader directoryReader;
@Override
public void startUp() throws Exception {
    // Resolve the configured storage backend (null means "use the default")
    ConfigDao configDao = new ConfigDao();
    Config luceneStorageConfig = configDao.getById(ConfigType.LUCENE_DIRECTORY_STORAGE);
    String luceneStorage = luceneStorageConfig == null ? null : luceneStorageConfig.getValue();

    if ("FILE".equals(luceneStorage)) {
        Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
        log.info("Using file Lucene storage: {}", luceneDirectory);
        directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
    } else {
        // RAM directory storage by default. Bug fix: an unrecognized config value
        // previously left 'directory' null and indexExists(directory) below threw an NPE.
        directory = new RAMDirectory();
        log.info("Using RAM Lucene storage");
    }

    // Check index version and rebuild it if necessary
    if (DirectoryReader.indexExists(directory)) {
        log.info("Checking index health and version");
        try (CheckIndex checkIndex = new CheckIndex(directory)) {
            CheckIndex.Status status = checkIndex.checkIndex();
            if (status.clean) {
                // Healthy index: still rebuild if any segment predates the current Lucene version
                for (CheckIndex.Status.SegmentInfoStatus segmentInfo : status.segmentInfos) {
                    if (!segmentInfo.version.onOrAfter(Version.LATEST)) {
                        log.info("Index is old (" + segmentInfo.version + "), rebuilding");
                        RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
                        AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
                        break;
                    }
                }
            } else {
                // Corrupt index: trigger an asynchronous full rebuild
                log.info("Index is dirty, rebuilding");
                RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
                AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
            }
        }
    }
}
@Override
public void shutDown() {
    // Release the reader before the directory it reads from.
    closeQuietly(directoryReader, "Error closing the index reader");
    closeQuietly(directory, "Error closing Lucene index");
}

/**
 * Closes a resource if non-null, logging (not propagating) any I/O failure.
 *
 * @param resource Resource to close, may be null
 * @param errorMessage Message logged on failure
 */
private void closeQuietly(Closeable resource, String errorMessage) {
    if (resource == null) {
        return;
    }
    try {
        resource.close();
    } catch (IOException e) {
        log.error(errorMessage, e);
    }
}
@Override
public void rebuildIndex(final List<Document> documentList, final List<File> fileList) {
    handle(writer -> {
        // Wipe the whole index, then re-add every document and file.
        writer.deleteAll();
        for (Document document : documentList) {
            writer.addDocument(getDocumentFromDocument(document));
        }
        for (File file : fileList) {
            writer.addDocument(getDocumentFromFile(file));
        }
    });
}
@Override
public void createDocument(final Document document) {
    // Add a new index entry built from this database document.
    handle(writer -> writer.addDocument(getDocumentFromDocument(document)));
}
@Override
public void createFile(final File file) {
    // Add a new index entry built from this file.
    handle(writer -> writer.addDocument(getDocumentFromFile(file)));
}
@Override
public void updateFile(final File file) {
    // Replace the index entry whose "id" field matches this file's ID.
    handle(writer -> writer.updateDocument(new Term("id", file.getId()), getDocumentFromFile(file)));
}
@Override
public void updateDocument(final Document document) {
    // Replace the index entry whose "id" field matches this document's ID.
    handle(writer -> writer.updateDocument(new Term("id", document.getId()), getDocumentFromDocument(document)));
}
@Override
public void deleteDocument(final String id) {
    // Single entry point for documents and files: both are indexed under "id".
    handle(writer -> {
        writer.deleteDocuments(new Term("id", id));
    });
}
@Override
public void findByCriteria(PaginatedList<DocumentDto> paginatedList, DocumentCriteria criteria, SortCriteria sortCriteria) throws Exception {
    // Fulltext matching goes through Lucene (see search() below); everything else —
    // ACLs, dates, tags, language, creator, routes — is a native SQL query assembled here.
    Map<String, Object> parameterMap = new HashMap<>();
    List<String> criteriaList = new ArrayList<>();
    // Base projection: document columns plus share count (c5), file count (c6)
    // and current route step (c7) from the joined subqueries below.
    StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, ");
    sb.append(" s.count c5, ");
    sb.append(" f.count c6, ");
    sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
    sb.append(" from T_DOCUMENT d ");
    // Share count per document (through the ACL targeting the share)
    sb.append(" left join (SELECT count(s.SHA_ID_C) count, ac.ACL_SOURCEID_C " +
            " FROM T_SHARE s, T_ACL ac " +
            " WHERE ac.ACL_TARGETID_C = s.SHA_ID_C AND ac.ACL_DELETEDATE_D IS NULL AND " +
            " s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
            " left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
            " FROM T_FILE f " +
            " WHERE f.FIL_DELETEDATE_D IS NULL group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
    // Current (lowest-order, not-yet-ended) route step visible to the caller
    sb.append(" left join (select rs.*, rs3.idDocument " +
            "from T_ROUTE_STEP rs " +
            "join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
            "where rs.RTP_IDTARGET_C in (:targetIdList)) rs2 on rs2.idDocument = d.DOC_ID_C ");
    // Add search criterias
    if (criteria.getTargetIdList() != null) {
        // Read permission is enough for searching; the ACL may be granted either
        // directly on the document (a) or through one of its tags (a2).
        sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
        sb.append(" left join T_DOCUMENT_TAG dta on dta.DOT_IDDOCUMENT_C = d.DOC_ID_C and dta.DOT_DELETEDATE_D is null ");
        sb.append(" left join T_ACL a2 on a2.ACL_TARGETID_C in (:targetIdList) and a2.ACL_SOURCEID_C = dta.DOT_IDTAG_C and a2.ACL_PERM_C = 'READ' and a2.ACL_DELETEDATE_D is null ");
        criteriaList.add("(a.ACL_ID_C is not null or a2.ACL_ID_C is not null)");
        parameterMap.put("targetIdList", criteria.getTargetIdList());
    }
    if (!Strings.isNullOrEmpty(criteria.getSearch()) || !Strings.isNullOrEmpty(criteria.getFullSearch())) {
        // Delegate fulltext matching to Lucene, then constrain the SQL query to the hits
        Set<String> documentIdList = search(criteria.getSearch(), criteria.getFullSearch());
        if (documentIdList.isEmpty()) {
            // If the search doesn't find any document, the request should return nothing
            documentIdList.add(UUID.randomUUID().toString());
        }
        criteriaList.add("d.DOC_ID_C in :documentIdList");
        parameterMap.put("documentIdList", documentIdList);
    }
    if (criteria.getCreateDateMin() != null) {
        criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
        parameterMap.put("createDateMin", criteria.getCreateDateMin());
    }
    if (criteria.getCreateDateMax() != null) {
        criteriaList.add("d.DOC_CREATEDATE_D <= :createDateMax");
        parameterMap.put("createDateMax", criteria.getCreateDateMax());
    }
    if (criteria.getUpdateDateMin() != null) {
        criteriaList.add("d.DOC_UPDATEDATE_D >= :updateDateMin");
        parameterMap.put("updateDateMin", criteria.getUpdateDateMin());
    }
    if (criteria.getUpdateDateMax() != null) {
        criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
        parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
    }
    if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
        // One numbered join per requested tag; a document matches if ANY tag is present (OR)
        int index = 0;
        List<String> tagCriteriaList = Lists.newArrayList();
        for (String tagId : criteria.getTagIdList()) {
            sb.append(String.format("left join T_DOCUMENT_TAG dt%d on dt%d.DOT_IDDOCUMENT_C = d.DOC_ID_C and dt%d.DOT_IDTAG_C = :tagId%d and dt%d.DOT_DELETEDATE_D is null ", index, index, index, index, index));
            parameterMap.put("tagId" + index, tagId);
            tagCriteriaList.add(String.format("dt%d.DOT_ID_C is not null", index));
            index++;
        }
        criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
    }
    if (criteria.getShared() != null && criteria.getShared()) {
        criteriaList.add("s.count > 0");
    }
    if (criteria.getLanguage() != null) {
        criteriaList.add("d.DOC_LANGUAGE_C = :language");
        parameterMap.put("language", criteria.getLanguage());
    }
    if (criteria.getCreatorId() != null) {
        criteriaList.add("d.DOC_IDUSER_C = :creatorId");
        parameterMap.put("creatorId", criteria.getCreatorId());
    }
    if (criteria.getActiveRoute() != null && criteria.getActiveRoute()) {
        criteriaList.add("rs2.RTP_ID_C is not null");
    }
    // Soft-deleted documents are always excluded
    criteriaList.add("d.DOC_DELETEDATE_D is null");
    if (!criteriaList.isEmpty()) {
        sb.append(" where ");
        sb.append(Joiner.on(" and ").join(criteriaList));
    }
    // Perform the search
    QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
    List<Object[]> l = PaginatedLists.executePaginatedQuery(paginatedList, queryParam, sortCriteria);
    // Assemble results
    List<DocumentDto> documentDtoList = new ArrayList<>();
    for (Object[] o : l) {
        // Column order mirrors the projection c0..c8 built above
        int i = 0;
        DocumentDto documentDto = new DocumentDto();
        documentDto.setId((String) o[i++]);
        documentDto.setTitle((String) o[i++]);
        documentDto.setDescription((String) o[i++]);
        documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
        documentDto.setLanguage((String) o[i++]);
        Number shareCount = (Number) o[i++];
        documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
        Number fileCount = (Number) o[i++];
        documentDto.setFileCount(fileCount == null ? 0 : fileCount.intValue());
        documentDto.setActiveRoute(o[i++] != null);
        documentDto.setCurrentStepName((String) o[i++]);
        documentDto.setUpdateTimestamp(((Timestamp) o[i]).getTime());
        documentDtoList.add(documentDto);
    }
    paginatedList.setResultList(documentDtoList);
}
/**
 * Fulltext search in files and documents.
 *
 * @param searchQuery Search query on metadatas
 * @param fullSearchQuery Search query on all fields
 * @return List of document IDs
 * @throws Exception e
 */
private Set<String> search(String searchQuery, String fullSearchQuery) throws Exception {
    // Escape and quote the queries so the query parser generates PhraseQuery instances
    String metadataQuery = "\"" + QueryParserUtil.escape(searchQuery + " " + fullSearchQuery) + "\"";
    String contentQuery = "\"" + QueryParserUtil.escape(fullSearchQuery) + "\"";

    // Build search query
    StandardQueryParser parser = new StandardQueryParser(new StandardAnalyzer());
    parser.setPhraseSlop(100); // PhraseQuery add terms

    // Every metadata field is searched with the metadata query,
    // the extracted file content with the full search query only
    String[] metadataFieldArray = { "title", "description", "subject", "identifier", "publisher",
            "format", "source", "type", "coverage", "rights", "filename" };
    BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
    for (String field : metadataFieldArray) {
        queryBuilder.add(parser.parse(metadataQuery, field), BooleanClause.Occur.SHOULD);
    }
    queryBuilder.add(parser.parse(contentQuery, "content"), BooleanClause.Occur.SHOULD);
    BooleanQuery query = queryBuilder.build();

    Set<String> documentIdList = new HashSet<>();
    DirectoryReader directoryReader = getDirectoryReader();
    if (directoryReader == null) {
        // The directory reader is not yet initialized (probably because there is nothing indexed)
        return documentIdList;
    }

    // Run the search and map each hit back to a document ID
    IndexSearcher searcher = new IndexSearcher(directoryReader);
    TopDocs topDocs = searcher.search(query, Integer.MAX_VALUE);
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
        org.apache.lucene.document.Document document = searcher.doc(scoreDoc.doc);
        String documentId;
        switch (document.get("doctype")) {
            case "document":
                documentId = document.get("id");
                break;
            case "file":
                // A file hit contributes its parent document
                documentId = document.get("document_id");
                break;
            default:
                documentId = null;
                break;
        }
        if (documentId != null) {
            documentIdList.add(documentId);
        }
    }
    return documentIdList;
}
/**
 * Build Lucene document from database document.
 *
 * @param document Document
 * @return Document
 */
private org.apache.lucene.document.Document getDocumentFromDocument(Document document) {
    org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
    // Mandatory fields: ID and type are stored so they can be read back from search hits
    luceneDocument.add(new StringField("id", document.getId(), Field.Store.YES));
    luceneDocument.add(new StringField("doctype", "document", Field.Store.YES));
    luceneDocument.add(new TextField("title", document.getTitle(), Field.Store.NO));

    // Optional metadata fields, indexed (not stored) only when present
    String[] fieldNameArray = { "description", "subject", "identifier", "publisher",
            "format", "source", "type", "coverage", "rights" };
    String[] fieldValueArray = { document.getDescription(), document.getSubject(), document.getIdentifier(),
            document.getPublisher(), document.getFormat(), document.getSource(), document.getType(),
            document.getCoverage(), document.getRights() };
    for (int i = 0; i < fieldNameArray.length; i++) {
        if (fieldValueArray[i] != null) {
            luceneDocument.add(new TextField(fieldNameArray[i], fieldValueArray[i], Field.Store.NO));
        }
    }
    return luceneDocument;
}
/**
 * Build Lucene document from file.
 *
 * @param file File
 * @return Document
 */
private org.apache.lucene.document.Document getDocumentFromFile(File file) {
    org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
    luceneDocument.add(new StringField("id", file.getId(), Field.Store.YES));
    luceneDocument.add(new StringField("doctype", "file", Field.Store.YES));

    String name = file.getName();
    if (name != null) {
        luceneDocument.add(new TextField("filename", name, Field.Store.NO));
    }
    String documentId = file.getDocumentId();
    if (documentId != null) {
        // Stored so a file hit can be mapped back to its parent document at search time
        luceneDocument.add(new StringField("document_id", documentId, Field.Store.YES));
    }
    String content = file.getContent();
    if (content != null) {
        luceneDocument.add(new TextField("content", content, Field.Store.NO));
    }
    return luceneDocument;
}
/**
 * Returns a valid directory reader.
 * Take care of reopening the reader if the index has changed
 * and closing the previous one.
 * <p>
 * Returns {@code null} when the index does not exist yet (nothing indexed)
 * or when opening the reader fails.
 * <p>
 * NOTE(review): the cached {@code directoryReader} field is read and swapped here
 * without synchronization — presumably all calls happen on a single indexing/search
 * path; confirm callers never invoke this concurrently.
 *
 * @return the directoryReader
 */
private DirectoryReader getDirectoryReader() {
    if (directoryReader == null) {
        try {
            // No reader yet: only open one if the index has actually been created
            if (!DirectoryReader.indexExists(directory)) {
                return null;
            }
            directoryReader = DirectoryReader.open(directory);
        } catch (IOException e) {
            // Leaves directoryReader null; the caller treats that as "nothing indexed"
            log.error("Error creating the directory reader", e);
        }
    } else {
        try {
            // openIfChanged returns null when the index is unchanged (keep the current reader)
            DirectoryReader newReader = DirectoryReader.openIfChanged(directoryReader);
            if (newReader != null) {
                // Close the stale reader before swapping in the fresh one.
                // NOTE(review): if close() throws, newReader is never assigned (leaked)
                // and the old, partially-closed reader is still returned.
                directoryReader.close();
                directoryReader = newReader;
            }
        } catch (IOException e) {
            log.error("Error while reopening the directory reader", e);
        }
    }
    return directoryReader;
}
/**
 * Encapsulate a process into a Lucene context.
 * <p>
 * Creates an {@link IndexWriter}, runs the given runnable with it, rolls the
 * index back if the runnable throws, and finally commits and closes the writer.
 * If the writer cannot be created at all, the error is logged and the runnable
 * is not executed.
 *
 * @param runnable Runnable
 */
private void handle(LuceneRunnable runnable) {
    // Standard analyzer
    IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
    // Automatically commit when closing this writer
    config.setCommitOnClose(true);
    // Merge sequentially, because Lucene writing is already done asynchronously
    config.setMergeScheduler(new SerialMergeScheduler());

    // Creating index writer
    IndexWriter indexWriter;
    try {
        indexWriter = new IndexWriter(directory, config);
    } catch (IOException e) {
        log.error("Cannot create IndexWriter", e);
        // Fix: previously the runnable was still invoked with a null writer,
        // producing a misleading secondary NPE. Without a writer there is
        // nothing useful to do, so bail out here.
        return;
    }

    try {
        runnable.run(indexWriter);
    } catch (Exception e) {
        log.error("Error in running index writing transaction", e);
        try {
            // Discard all uncommitted changes made by the failed runnable
            indexWriter.rollback();
        } catch (IOException e1) {
            log.error("Cannot rollback index writing transaction", e1);
        }
    }

    try {
        // No-op if the writer was already closed by rollback() above
        indexWriter.close();
    } catch (IOException e) {
        log.error("Cannot commit and close IndexWriter", e);
    }
}
/**
 * Lucene runnable.
 * <p>
 * A unit of index-writing work executed by {@code handle(LuceneRunnable)}
 * inside a managed Lucene writer context (rollback on failure, commit on close).
 *
 * @author bgamard
 */
@FunctionalInterface
private interface LuceneRunnable {
    /**
     * Code to run in a Lucene context.
     *
     * @param indexWriter Index writer
     * @throws Exception e
     */
    void run(IndexWriter indexWriter) throws Exception;
}
}

View File

@ -4,8 +4,8 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.ConfigDao;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.context.ThreadLocalContext;

View File

@ -1,6 +1,7 @@
package com.sismics.docs.core.dao.jpa;
import com.sismics.docs.BaseTransactionalTest;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.authentication.InternalAuthenticationHandler;

View File

@ -2,7 +2,7 @@ package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.format.*;
import com.sismics.util.mime.MimeType;

View File

@ -2,8 +2,8 @@ package com.sismics.rest.util;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.dto.AclDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.dto.AclDto;
import com.sismics.util.JsonUtil;
import javax.json.Json;

View File

@ -1,7 +1,7 @@
package com.sismics.util.filter;
import com.google.common.base.Strings;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.User;
import javax.servlet.FilterConfig;

View File

@ -64,7 +64,7 @@ public class RequestContextFilter implements Filter {
@Override
public void destroy() {
// NOP
AppContext.getInstance().shutDown();
}
@Override

View File

@ -1,10 +1,10 @@
package com.sismics.util.filter;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.GroupDao;
import com.sismics.docs.core.dao.jpa.RoleBaseFunctionDao;
import com.sismics.docs.core.dao.jpa.criteria.GroupCriteria;
import com.sismics.docs.core.dao.jpa.dto.GroupDto;
import com.sismics.docs.core.dao.GroupDao;
import com.sismics.docs.core.dao.RoleBaseFunctionDao;
import com.sismics.docs.core.dao.criteria.GroupCriteria;
import com.sismics.docs.core.dao.dto.GroupDto;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.security.AnonymousPrincipal;
import com.sismics.security.UserPrincipal;

View File

@ -1,7 +1,7 @@
package com.sismics.util.filter;
import com.sismics.docs.core.dao.jpa.AuthenticationTokenDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.AuthenticationTokenDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.AuthenticationToken;
import com.sismics.docs.core.model.jpa.User;

View File

@ -79,6 +79,7 @@ public abstract class BaseJerseyTest extends JerseyTest {
httpServer = HttpServer.createSimpleServer(getClass().getResource("/").getFile(), "localhost", getPort());
WebappContext context = new WebappContext("GrizzlyContext", "/docs");
context.addListener("com.twelvemonkeys.servlet.image.IIOProviderContextListener");
context.addFilter("requestContextFilter", RequestContextFilter.class)
.addMappingForUrlPatterns(null, "/*");
context.addFilter("tokenBasedSecurityFilter", TokenBasedSecurityFilter.class)

View File

@ -4,11 +4,11 @@ import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.*;
import com.sismics.docs.core.dao.jpa.criteria.GroupCriteria;
import com.sismics.docs.core.dao.jpa.criteria.UserCriteria;
import com.sismics.docs.core.dao.jpa.dto.GroupDto;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.*;
import com.sismics.docs.core.dao.criteria.GroupCriteria;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.GroupDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;

View File

@ -3,10 +3,10 @@ package com.sismics.docs.rest.resource;
import com.google.common.base.Strings;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.ConfigDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Config;

View File

@ -2,10 +2,10 @@ package com.sismics.docs.rest.resource;
import com.google.common.base.Strings;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.AuditLogDao;
import com.sismics.docs.core.dao.jpa.criteria.AuditLogCriteria;
import com.sismics.docs.core.dao.jpa.dto.AuditLogDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.AuditLogDao;
import com.sismics.docs.core.dao.criteria.AuditLogCriteria;
import com.sismics.docs.core.dao.dto.AuditLogDto;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
import com.sismics.docs.core.util.jpa.SortCriteria;

View File

@ -1,9 +1,9 @@
package com.sismics.docs.rest.resource;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.CommentDao;
import com.sismics.docs.core.dao.jpa.dto.CommentDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.CommentDao;
import com.sismics.docs.core.dao.dto.CommentDto;
import com.sismics.docs.core.model.jpa.Comment;
import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.util.ValidationUtil;

View File

@ -6,14 +6,15 @@ import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.*;
import com.sismics.docs.core.dao.jpa.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.*;
import com.sismics.docs.core.dao.*;
import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.*;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.event.DocumentDeletedAsyncEvent;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.model.jpa.User;
@ -363,14 +364,13 @@ public class DocumentResource extends BaseResource {
JsonObjectBuilder response = Json.createObjectBuilder();
JsonArrayBuilder documents = Json.createArrayBuilder();
DocumentDao documentDao = new DocumentDao();
TagDao tagDao = new TagDao();
PaginatedList<DocumentDto> paginatedList = PaginatedLists.create(limit, offset);
SortCriteria sortCriteria = new SortCriteria(sortColumn, asc);
DocumentCriteria documentCriteria = parseSearchQuery(search);
documentCriteria.setTargetIdList(getTargetIdList(null));
try {
documentDao.findByCriteria(paginatedList, documentCriteria, sortCriteria);
AppContext.getInstance().getIndexingHandler().findByCriteria(paginatedList, documentCriteria, sortCriteria);
} catch (Exception e) {
throw new ServerException("SearchError", "Error searching in documents", e);
}

View File

@ -4,11 +4,11 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.event.FileUpdatedAsyncEvent;

View File

@ -1,12 +1,12 @@
package com.sismics.docs.rest.resource;
import com.google.common.base.Strings;
import com.sismics.docs.core.dao.jpa.GroupDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.criteria.GroupCriteria;
import com.sismics.docs.core.dao.jpa.criteria.UserCriteria;
import com.sismics.docs.core.dao.jpa.dto.GroupDto;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.GroupDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.GroupCriteria;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.GroupDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.Group;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.model.jpa.UserGroup;

View File

@ -5,12 +5,12 @@ import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.ActionType;
import com.sismics.docs.core.constant.RouteStepTransition;
import com.sismics.docs.core.constant.RouteStepType;
import com.sismics.docs.core.dao.jpa.GroupDao;
import com.sismics.docs.core.dao.jpa.RouteModelDao;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.criteria.RouteModelCriteria;
import com.sismics.docs.core.dao.jpa.dto.RouteModelDto;
import com.sismics.docs.core.dao.GroupDao;
import com.sismics.docs.core.dao.RouteModelDao;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.RouteModelCriteria;
import com.sismics.docs.core.dao.dto.RouteModelDto;
import com.sismics.docs.core.model.jpa.Group;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.docs.core.model.jpa.User;

View File

@ -1,12 +1,12 @@
package com.sismics.docs.rest.resource;
import com.sismics.docs.core.constant.*;
import com.sismics.docs.core.dao.jpa.*;
import com.sismics.docs.core.dao.jpa.criteria.RouteCriteria;
import com.sismics.docs.core.dao.jpa.criteria.RouteStepCriteria;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.jpa.dto.RouteDto;
import com.sismics.docs.core.dao.jpa.dto.RouteStepDto;
import com.sismics.docs.core.dao.*;
import com.sismics.docs.core.dao.criteria.RouteCriteria;
import com.sismics.docs.core.dao.criteria.RouteStepCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.dao.dto.RouteDto;
import com.sismics.docs.core.dao.dto.RouteStepDto;
import com.sismics.docs.core.model.jpa.Route;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.docs.core.model.jpa.RouteStep;

View File

@ -4,8 +4,8 @@ package com.sismics.docs.rest.resource;
import com.sismics.docs.core.constant.AclTargetType;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.ShareDao;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.ShareDao;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Share;
import com.sismics.rest.exception.ClientException;

View File

@ -3,10 +3,10 @@ package com.sismics.docs.rest.resource;
import com.google.common.collect.Sets;
import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.criteria.TagCriteria;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.jpa.SortCriteria;

View File

@ -3,7 +3,7 @@ package com.sismics.docs.rest.resource;
import com.google.common.base.Strings;
import com.google.common.io.ByteStreams;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.jpa.ConfigDao;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.rest.constant.BaseFunction;

View File

@ -4,11 +4,11 @@ import com.google.common.base.Strings;
import com.google.common.collect.Sets;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.*;
import com.sismics.docs.core.dao.jpa.criteria.GroupCriteria;
import com.sismics.docs.core.dao.jpa.criteria.UserCriteria;
import com.sismics.docs.core.dao.jpa.dto.GroupDto;
import com.sismics.docs.core.dao.jpa.dto.UserDto;
import com.sismics.docs.core.dao.*;
import com.sismics.docs.core.dao.criteria.GroupCriteria;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.GroupDto;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.event.DocumentDeletedAsyncEvent;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.event.PasswordLostEvent;

View File

@ -1,6 +1,6 @@
package com.sismics.docs.rest.resource;
import com.sismics.docs.core.dao.jpa.VocabularyDao;
import com.sismics.docs.core.dao.VocabularyDao;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.docs.rest.constant.BaseFunction;
import com.sismics.rest.exception.ForbiddenClientException;

View File

@ -1,6 +1,5 @@
package com.sismics.docs.rest;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import com.sismics.util.totp.GoogleAuthenticator;
import org.junit.Assert;
@ -419,7 +418,6 @@ public class TestUserResource extends BaseJerseyTest {
.post(Entity.form(new Form()
.param("username", "absent_minded")), JsonObject.class);
Assert.assertEquals("ok", json.getString("status"));
AppContext.getInstance().waitForAsync();
String emailBody = popEmail();
Assert.assertNotNull("No email to consume", emailBody);
Assert.assertTrue(emailBody.contains("Please reset your password"));