Merge pull request #76 from sismics/master

Push to production
Benjamin Gamard 2016-03-02 00:43:37 +01:00
commit 1f6d9f0211
88 changed files with 2144 additions and 1104 deletions

View File

@ -24,6 +24,7 @@ Features
- Support image, PDF, ODT and DOCX files
- Flexible search engine
- Full text search in image and PDF
- All [Dublin Core](http://dublincore.org/) metadata
- 256-bit AES encryption
- Tag system with relations
- Multi-users ACL system

View File

@ -61,13 +61,6 @@
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/jni" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/assets" type="java-test-resource" />
@ -75,11 +68,15 @@
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/rs" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/assets" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/blame" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/builds" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/dependency-cache" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/appcompat-v7/23.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/recyclerview-v7/23.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/support-v4/23.1.1/jars" />
@ -87,21 +84,11 @@
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/it.sephiroth.android.library.easing/android-easing/1.0.3/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/it.sephiroth.android.library.imagezoom/imagezoom/1.0.5/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental-classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental-runtime-classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental-verifier" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/instant-run-support" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/jniLibs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/manifests" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/pre-dexed" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/reload-dex" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/res" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/restart-dex" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/rs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/symbols" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/transforms" />
<excludeFolder url="file://$MODULE_DIR$/build/outputs" />
<excludeFolder url="file://$MODULE_DIR$/build/tmp" />
</content>
<orderEntry type="jdk" jdkName="Android API 23 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />

View File

@ -3,7 +3,7 @@ buildscript {
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:2.0.0-beta3'
classpath 'com.android.tools.build:gradle:2.0.0-beta5'
}
}
apply plugin: 'com.android.application'

View File

@ -93,9 +93,10 @@
<artifactId>lucene-queryparser</artifactId>
</dependency>
<!-- Only there to read old indexes and rebuild them -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId>
<artifactId>lucene-backward-codecs</artifactId>
</dependency>
<dependency>

View File

@ -26,10 +26,11 @@ public class AclDao {
* Creates a new ACL.
*
* @param acl ACL
* @param userId User ID
* @return New ID
* @throws Exception
*/
public String create(Acl acl) {
public String create(Acl acl, String userId) {
// Create the UUID
acl.setId(UUID.randomUUID().toString());
@ -38,7 +39,7 @@ public class AclDao {
em.persist(acl);
// Create audit log
AuditLogUtil.create(acl, AuditLogType.CREATE);
AuditLogUtil.create(acl, AuditLogType.CREATE, userId);
return acl.getId();
}
@ -125,9 +126,10 @@ public class AclDao {
* @param sourceId Source ID
* @param perm Permission
* @param targetId Target ID
* @param userId User ID
*/
@SuppressWarnings("unchecked")
public void delete(String sourceId, PermType perm, String targetId) {
public void delete(String sourceId, PermType perm, String targetId, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Create audit log
@ -137,7 +139,7 @@ public class AclDao {
q.setParameter("targetId", targetId);
List<Acl> aclList = q.getResultList();
for (Acl acl : aclList) {
AuditLogUtil.create(acl, AuditLogType.DELETE);
AuditLogUtil.create(acl, AuditLogType.DELETE, userId);
}
// Soft delete the ACLs

View File

@ -59,12 +59,13 @@ public class AuditLogDao {
public void findByCriteria(PaginatedList<AuditLogDto> paginatedList, AuditLogCriteria criteria, SortCriteria sortCriteria) throws Exception {
Map<String, Object> parameterMap = new HashMap<String, Object>();
String baseQuery = "select l.LOG_ID_C c0, l.LOG_CREATEDATE_D c1, l.LOG_IDENTITY_C c2, l.LOG_CLASSENTITY_C c3, l.LOG_TYPE_C c4, l.LOG_MESSAGE_C c5 from T_AUDIT_LOG l ";
StringBuilder baseQuery = new StringBuilder("select l.LOG_ID_C c0, l.LOG_CREATEDATE_D c1, u.USE_USERNAME_C c2, l.LOG_IDENTITY_C c3, l.LOG_CLASSENTITY_C c4, l.LOG_TYPE_C c5, l.LOG_MESSAGE_C c6 from T_AUDIT_LOG l ");
baseQuery.append(" join T_USER u on l.LOG_IDUSER_C = u.USE_ID_C ");
List<String> queries = Lists.newArrayList();
// Adds search criteria
if (criteria.getDocumentId() != null) {
// ACL on document is not checked here, it's assumed
// ACL on document is not checked here, rights have already been checked by the caller
queries.add(baseQuery + " where l.LOG_IDENTITY_C = :documentId ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select c.COM_ID_C from T_COMMENT c where c.COM_IDDOC_C = :documentId) ");
@ -73,11 +74,9 @@ public class AuditLogDao {
}
if (criteria.getUserId() != null) {
queries.add(baseQuery + " where l.LOG_IDENTITY_C = :userId ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select t.TAG_ID_C from T_TAG t where t.TAG_IDUSER_C = :userId) ");
// Show only logs from owned documents, ACLs are lost on delete
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select d.DOC_ID_C from T_DOCUMENT d where d.DOC_IDUSER_C = :userId) ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select c.COM_ID_C from T_COMMENT c where c.COM_IDUSER_C = :userId) ");
// Get all logs originating from the user, not necessarily on owned items
// Filter out ACL logs
queries.add(baseQuery + " where l.LOG_IDUSER_C = :userId and l.LOG_CLASSENTITY_C != 'Acl' ");
parameterMap.put("userId", criteria.getUserId());
}
@ -92,6 +91,7 @@ public class AuditLogDao {
AuditLogDto auditLogDto = new AuditLogDto();
auditLogDto.setId((String) o[i++]);
auditLogDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
auditLogDto.setUsername((String) o[i++]);
auditLogDto.setEntityId((String) o[i++]);
auditLogDto.setEntityClass((String) o[i++]);
auditLogDto.setType(AuditLogType.valueOf((String) o[i++]));

View File

@ -26,10 +26,11 @@ public class CommentDao {
* Creates a new comment.
*
* @param comment Comment
* @param userId User ID
* @return New ID
* @throws Exception
*/
public String create(Comment comment) {
public String create(Comment comment, String userId) {
// Create the UUID
comment.setId(UUID.randomUUID().toString());
@ -39,7 +40,7 @@ public class CommentDao {
em.persist(comment);
// Create audit log
AuditLogUtil.create(comment, AuditLogType.CREATE);
AuditLogUtil.create(comment, AuditLogType.CREATE, userId);
return comment.getId();
}
@ -48,21 +49,22 @@ public class CommentDao {
* Deletes a comment.
*
* @param id Comment ID
* @param userId User ID
*/
public void delete(String id) {
public void delete(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
// Get the comment
Query q = em.createQuery("select c from Comment c where c.id = :id and c.deleteDate is null");
q.setParameter("id", id);
Comment commentDb = (Comment) q.getSingleResult();
// Delete the document
// Delete the comment
Date dateNow = new Date();
commentDb.setDeleteDate(dateNow);
// Create audit log
AuditLogUtil.create(commentDb, AuditLogType.DELETE);
AuditLogUtil.create(commentDb, AuditLogType.DELETE, userId);
}
/**

View File

@ -0,0 +1,80 @@
package com.sismics.docs.core.dao.jpa;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import com.sismics.docs.core.dao.jpa.dto.ContributorDto;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.util.context.ThreadLocalContext;
/**
* Contributor DAO.
*
* @author bgamard
*/
public class ContributorDao {
/**
* Creates a new contributor.
*
* @param contributor Contributor
* @return New ID
* @throws Exception
*/
public String create(Contributor contributor) {
// Create the UUID
contributor.setId(UUID.randomUUID().toString());
// Create the contributor
EntityManager em = ThreadLocalContext.get().getEntityManager();
em.persist(contributor);
return contributor.getId();
}
/**
* Returns the list of all contributors of a document.
*
* @param documentId Document ID
* @return List of contributors
*/
@SuppressWarnings("unchecked")
public List<Contributor> findByDocumentId(String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select c from Contributor c where c.documentId = :documentId");
q.setParameter("documentId", documentId);
return q.getResultList();
}
/**
* Returns the list of all contributors of a document, with user details.
*
* @param documentId Document ID
* @return List of contributors
*/
@SuppressWarnings("unchecked")
public List<ContributorDto> getByDocumentId(String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
sb.append(" join T_USER u on u.USE_ID_C = c.CTR_IDUSER_C ");
sb.append(" where c.CTR_IDDOC_C = :documentId ");
Query q = em.createNativeQuery(sb.toString());
q.setParameter("documentId", documentId);
List<Object[]> l = q.getResultList();
// Assemble results
List<ContributorDto> contributorDtoList = new ArrayList<>();
for (Object[] o : l) {
int i = 0;
ContributorDto contributorDto = new ContributorDto();
contributorDto.setUsername((String) o[i++]);
contributorDto.setEmail((String) o[i++]);
contributorDtoList.add(contributorDto);
}
return contributorDtoList;
}
}
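Reviewer note: a minimal read-path sketch (not part of the diff) for the new DAO, e.g. when assembling a document response. Only ContributorDao and ContributorDto come from this commit; the surrounding method and the caller-supplied documentId are illustrative, and an active JPA transaction bound to ThreadLocalContext is assumed, as for every other DAO in this module.

import java.util.List;

import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.jpa.dto.ContributorDto;

public class ContributorListingSketch {
    // Prints the contributors (username and email) of one document.
    public static void printContributors(String documentId) {
        ContributorDao contributorDao = new ContributorDao();
        List<ContributorDto> contributorDtoList = contributorDao.getByDocumentId(documentId);
        for (ContributorDto contributorDto : contributorDtoList) {
            System.out.println(contributorDto.getUsername() + " <" + contributorDto.getEmail() + ">");
        }
    }
}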

View File

@ -38,10 +38,11 @@ public class DocumentDao {
* Creates a new document.
*
* @param document Document
* @param userId User ID
* @return New ID
* @throws Exception
*/
public String create(Document document) {
public String create(Document document, String userId) {
// Create the UUID
document.setId(UUID.randomUUID().toString());
@ -50,7 +51,7 @@ public class DocumentDao {
em.persist(document);
// Create audit log
AuditLogUtil.create(document, AuditLogType.CREATE);
AuditLogUtil.create(document, AuditLogType.CREATE, userId);
return document.getId();
}
@ -89,7 +90,7 @@ public class DocumentDao {
*/
public DocumentDto getDocument(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_CREATEDATE_D, d.DOC_LANGUAGE_C, ");
StringBuilder sb = new StringBuilder("select d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_LANGUAGE_C, ");
sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null), ");
sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C), ");
sb.append(" u.USE_USERNAME_C ");
@ -109,6 +110,14 @@ public class DocumentDao {
documentDto.setId((String) o[i++]);
documentDto.setTitle((String) o[i++]);
documentDto.setDescription((String) o[i++]);
documentDto.setSubject((String) o[i++]);
documentDto.setIdentifier((String) o[i++]);
documentDto.setPublisher((String) o[i++]);
documentDto.setFormat((String) o[i++]);
documentDto.setSource((String) o[i++]);
documentDto.setType((String) o[i++]);
documentDto.setCoverage((String) o[i++]);
documentDto.setRights((String) o[i++]);
documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setLanguage((String) o[i++]);
documentDto.setShared(((Number) o[i++]).intValue() > 0);
@ -127,9 +136,10 @@ public class DocumentDao {
*/
public Document getDocument(String id, PermType perm, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createNativeQuery("select d.* from T_DOCUMENT d "
+ " join T_ACL a on a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_TARGETID_C = :userId and a.ACL_PERM_C = :perm and a.ACL_DELETEDATE_D is null "
+ " where d.DOC_ID_C = :id and d.DOC_DELETEDATE_D is null", Document.class);
StringBuilder sb = new StringBuilder("select d.* from T_DOCUMENT d ");
sb.append(" join T_ACL a on a.ACL_SOURCEID_C = d.DOC_ID_C and a.ACL_TARGETID_C = :userId and a.ACL_PERM_C = :perm and a.ACL_DELETEDATE_D is null ");
sb.append(" where d.DOC_ID_C = :id and d.DOC_DELETEDATE_D is null");
Query q = em.createNativeQuery(sb.toString(), Document.class);
q.setParameter("id", id);
q.setParameter("perm", perm.name());
q.setParameter("userId", userId);
@ -144,8 +154,9 @@ public class DocumentDao {
* Deletes a document.
*
* @param id Document ID
* @param userId User ID
*/
public void delete(String id) {
public void delete(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
@ -174,7 +185,7 @@ public class DocumentDao {
q.executeUpdate();
// Create audit log
AuditLogUtil.create(documentDb, AuditLogType.DELETE);
AuditLogUtil.create(documentDb, AuditLogType.DELETE, userId);
}
/**
@ -249,6 +260,10 @@ public class DocumentDao {
criteriaList.add("d.DOC_LANGUAGE_C = :language");
parameterMap.put("language", criteria.getLanguage());
}
if (criteria.getCreatorId() != null) {
criteriaList.add("d.DOC_IDUSER_C = :creatorId");
parameterMap.put("creatorId", criteria.getCreatorId());
}
criteriaList.add("d.DOC_DELETEDATE_D is null");
@ -262,7 +277,7 @@ public class DocumentDao {
List<Object[]> l = PaginatedLists.executePaginatedQuery(paginatedList, queryParam, sortCriteria);
// Assemble results
List<DocumentDto> documentDtoList = new ArrayList<DocumentDto>();
List<DocumentDto> documentDtoList = new ArrayList<>();
for (Object[] o : l) {
int i = 0;
DocumentDto documentDto = new DocumentDto();
@ -283,9 +298,10 @@ public class DocumentDao {
* Update a document.
*
* @param document Document to update
* @param userId User ID
* @return Updated document
*/
public Document update(Document document) {
public Document update(Document document, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
@ -296,11 +312,19 @@ public class DocumentDao {
// Update the document
documentFromDb.setTitle(document.getTitle());
documentFromDb.setDescription(document.getDescription());
documentFromDb.setSubject(document.getSubject());
documentFromDb.setIdentifier(document.getIdentifier());
documentFromDb.setPublisher(document.getPublisher());
documentFromDb.setFormat(document.getFormat());
documentFromDb.setSource(document.getSource());
documentFromDb.setType(document.getType());
documentFromDb.setCoverage(document.getCoverage());
documentFromDb.setRights(document.getRights());
documentFromDb.setCreateDate(document.getCreateDate());
documentFromDb.setLanguage(document.getLanguage());
// Create audit log
AuditLogUtil.create(documentFromDb, AuditLogType.UPDATE);
AuditLogUtil.create(documentFromDb, AuditLogType.UPDATE, userId);
return documentFromDb;
}
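Reviewer note: a short usage sketch (not part of the diff) of the widened create signature together with the new Dublin Core columns, assuming an active JPA transaction bound to ThreadLocalContext; all field values are made-up examples.

import java.util.Date;

import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.model.jpa.Document;

public class DublinCoreSketch {
    // Creates a document carrying the Dublin Core metadata added in this commit;
    // the userId is also passed to the DAO so the audit log entry can name its author.
    public static String createWithMetadata(String userId) {
        Document document = new Document();
        document.setUserId(userId);
        document.setTitle("Quarterly report");   // example values throughout
        document.setDescription("Q1 figures");
        document.setLanguage("eng");
        document.setCreateDate(new Date());
        document.setSubject("Finance");
        document.setIdentifier("RPT-2016-Q1");
        document.setPublisher("Sismics");
        document.setFormat("application/pdf");
        document.setSource("Accounting");
        document.setType("Text");
        document.setCoverage("Europe");
        document.setRights("Internal");
        return new DocumentDao().create(document, userId);
    }
}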

View File

@ -23,10 +23,11 @@ public class FileDao {
* Creates a new file.
*
* @param file File
* @param userId User ID
* @return New ID
* @throws Exception
*/
public String create(File file) {
public String create(File file, String userId) {
// Create the UUID
file.setId(UUID.randomUUID().toString());
@ -36,7 +37,7 @@ public class FileDao {
em.persist(file);
// Create audit log
AuditLogUtil.create(file, AuditLogType.CREATE);
AuditLogUtil.create(file, AuditLogType.CREATE, userId);
return file.getId();
}
@ -107,8 +108,9 @@ public class FileDao {
* Deletes a file.
*
* @param id File ID
* @param userId User ID
*/
public void delete(String id) {
public void delete(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file
@ -121,7 +123,7 @@ public class FileDao {
fileDb.setDeleteDate(dateNow);
// Create audit log
AuditLogUtil.create(fileDb, AuditLogType.DELETE);
AuditLogUtil.create(fileDb, AuditLogType.DELETE, userId);
}
/**

View File

@ -170,10 +170,11 @@ public class TagDao {
* Creates a new tag.
*
* @param tag Tag
* @param userId User ID
* @return New ID
* @throws Exception
*/
public String create(Tag tag) {
public String create(Tag tag, String userId) {
// Create the UUID
tag.setId(UUID.randomUUID().toString());
@ -183,7 +184,7 @@ public class TagDao {
em.persist(tag);
// Create audit log
AuditLogUtil.create(tag, AuditLogType.CREATE);
AuditLogUtil.create(tag, AuditLogType.CREATE, userId);
return tag.getId();
}
@ -230,8 +231,9 @@ public class TagDao {
* Deletes a tag.
*
* @param tagId Tag ID
* @param userId User ID
*/
public void delete(String tagId) {
public void delete(String tagId, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the tag
@ -250,7 +252,7 @@ public class TagDao {
q.executeUpdate();
// Create audit log
AuditLogUtil.create(tagDb, AuditLogType.DELETE);
AuditLogUtil.create(tagDb, AuditLogType.DELETE, userId);
}
/**
@ -272,9 +274,10 @@ public class TagDao {
* Update a tag.
*
* @param tag Tag to update
* @param userId User ID
* @return Updated tag
*/
public Tag update(Tag tag) {
public Tag update(Tag tag, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the tag
@ -288,7 +291,7 @@ public class TagDao {
tagFromDb.setParentId(tag.getParentId());
// Create audit log
AuditLogUtil.create(tagFromDb, AuditLogType.UPDATE);
AuditLogUtil.create(tagFromDb, AuditLogType.UPDATE, userId);
return tagFromDb;
}

View File

@ -58,10 +58,11 @@ public class UserDao {
* Creates a new user.
*
* @param user User to create
* @param userId User ID
* @return User ID
* @throws Exception
*/
public String create(User user) throws Exception {
public String create(User user, String userId) throws Exception {
// Create the user UUID
user.setId(UUID.randomUUID().toString());
@ -80,7 +81,7 @@ public class UserDao {
em.persist(user);
// Create audit log
AuditLogUtil.create(user, AuditLogType.CREATE);
AuditLogUtil.create(user, AuditLogType.CREATE, userId);
return user.getId();
}
@ -89,9 +90,10 @@ public class UserDao {
* Updates a user.
*
* @param user User to update
* @param userId User ID
* @return Updated user
*/
public User update(User user) {
public User update(User user, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the user
@ -99,13 +101,13 @@ public class UserDao {
q.setParameter("id", user.getId());
User userFromDb = (User) q.getSingleResult();
// Update the user
// Update the user (except password)
userFromDb.setEmail(user.getEmail());
userFromDb.setStorageQuota(user.getStorageQuota());
userFromDb.setStorageCurrent(user.getStorageCurrent());
// Create audit log
AuditLogUtil.create(userFromDb, AuditLogType.UPDATE);
AuditLogUtil.create(userFromDb, AuditLogType.UPDATE, userId);
return user;
}
@ -134,9 +136,10 @@ public class UserDao {
* Update the user password.
*
* @param user User to update
* @param userId User ID
* @return Updated user
*/
public User updatePassword(User user) {
public User updatePassword(User user, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the user
@ -148,7 +151,7 @@ public class UserDao {
userFromDb.setPassword(hashPassword(user.getPassword()));
// Create audit log
AuditLogUtil.create(userFromDb, AuditLogType.UPDATE);
AuditLogUtil.create(userFromDb, AuditLogType.UPDATE, userId);
return user;
}
@ -206,8 +209,9 @@ public class UserDao {
* Deletes a user.
*
* @param username User's username
* @param userId User ID
*/
public void delete(String username) {
public void delete(String username, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the user
@ -245,7 +249,7 @@ public class UserDao {
q.executeUpdate();
// Create audit log
AuditLogUtil.create(userFromDb, AuditLogType.DELETE);
AuditLogUtil.create(userFromDb, AuditLogType.DELETE, userId);
}
/**

View File

@ -0,0 +1,104 @@
package com.sismics.docs.core.dao.jpa;
import java.util.List;
import java.util.UUID;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.util.context.ThreadLocalContext;
/**
* Vocabulary DAO.
*
* @author bgamard
*/
public class VocabularyDao {
/**
* Creates a new vocabulary entry.
*
* @param vocabulary Vocabulary
* @return New ID
* @throws Exception
*/
public String create(Vocabulary vocabulary) {
// Create the UUID
vocabulary.setId(UUID.randomUUID().toString());
// Create the vocabulary entry
EntityManager em = ThreadLocalContext.get().getEntityManager();
em.persist(vocabulary);
return vocabulary.getId();
}
/**
* Get all vocabulary entries sharing the same name.
*
* @param name Name
* @return Vocabulary entries
*/
@SuppressWarnings("unchecked")
public List<Vocabulary> getByName(String name) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the entries
Query q = em.createQuery("select v from Vocabulary v where v.name = :name order by v.order");
q.setParameter("name", name);
return q.getResultList();
}
/**
* Get a vocabulary entry by ID.
*
* @param id ID
* @return Vocabulary
*/
public Vocabulary getById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
try {
return em.find(Vocabulary.class, id);
} catch (NoResultException e) {
return null;
}
}
/**
* Update a vocabulary entry.
*
* @param vocabulary Vocabulary to update
*/
public Vocabulary update(Vocabulary vocabulary) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the vocabulary entry
Query q = em.createQuery("select v from Vocabulary v where v.id = :id");
q.setParameter("id", vocabulary.getId());
Vocabulary vocabularyFromDb = (Vocabulary) q.getSingleResult();
// Update the vocabulary entry
vocabularyFromDb.setName(vocabulary.getName());
vocabularyFromDb.setValue(vocabulary.getValue());
vocabularyFromDb.setOrder(vocabulary.getOrder());
return vocabularyFromDb;
}
/**
* Deletes a vocabulary entry.
*
* @param id Vocabulary ID
*/
public void delete(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the vocabulary
Query q = em.createQuery("select v from Vocabulary v where v.id = :id");
q.setParameter("id", id);
Vocabulary vocabularyDb = (Vocabulary) q.getSingleResult();
em.remove(vocabularyDb);
}
}
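Reviewer note: a small sketch (not part of the diff) of the intended use of the vocabulary DAO; the group name and values are examples, the Vocabulary entity setters are taken from the update() method above, and an active JPA transaction is assumed.

import java.util.List;

import com.sismics.docs.core.dao.jpa.VocabularyDao;
import com.sismics.docs.core.model.jpa.Vocabulary;

public class VocabularySketch {
    // Adds one entry to a vocabulary and reads the whole vocabulary back,
    // ordered by its "order" column.
    public static List<Vocabulary> addAndList() {
        Vocabulary vocabulary = new Vocabulary();
        vocabulary.setName("type");     // example vocabulary name
        vocabulary.setValue("Image");   // example value
        vocabulary.setOrder(0);

        VocabularyDao vocabularyDao = new VocabularyDao();
        vocabularyDao.create(vocabulary);
        return vocabularyDao.getByName("type");
    }
}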

View File

@ -51,146 +51,79 @@ public class DocumentCriteria {
private String language;
/**
* Getter of userId.
*
* @return userId
* Creator ID.
*/
private String creatorId;
public String getUserId() {
return userId;
}
/**
* Setter of userId.
*
* @param userId userId
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* Getter of search.
*
* @return the search
*/
public String getSearch() {
return search;
}
/**
* Setter of search.
*
* @param search search
*/
public void setSearch(String search) {
this.search = search;
}
/**
* Getter of fullSearch.
*
* @return the fullSearch
*/
public String getFullSearch() {
return fullSearch;
}
/**
* Setter of fullSearch.
*
* @param fullSearch fullSearch
*/
public void setFullSearch(String fullSearch) {
this.fullSearch = fullSearch;
}
/**
* Getter of createDateMin.
*
* @return the createDateMin
*/
public Date getCreateDateMin() {
return createDateMin;
}
/**
* Setter of createDateMin.
*
* @param createDateMin createDateMin
*/
public void setCreateDateMin(Date createDateMin) {
this.createDateMin = createDateMin;
}
/**
* Getter of createDateMax.
*
* @return the createDateMax
*/
public Date getCreateDateMax() {
return createDateMax;
}
/**
* Setter of createDateMax.
*
* @param createDateMax createDateMax
*/
public void setCreateDateMax(Date createDateMax) {
this.createDateMax = createDateMax;
}
/**
* Getter of tagIdList.
*
* @return the tagIdList
*/
public List<String> getTagIdList() {
return tagIdList;
}
/**
* Setter of tagIdList.
*
* @param tagIdList tagIdList
*/
public void setTagIdList(List<String> tagIdList) {
this.tagIdList = tagIdList;
}
/**
* Getter of shared.
*
* @return the shared
*/
public Boolean getShared() {
return shared;
}
/**
* Setter of shared.
*
* @param shared shared
*/
public void setShared(Boolean shared) {
this.shared = shared;
}
/**
* Getter of language.
*
* @return the language
*/
public String getLanguage() {
return language;
}
/**
* Setter of language.
*
* @param language language
*/
public void setLanguage(String language) {
this.language = language;
}
public String getCreatorId() {
return creatorId;
}
public void setCreatorId(String creatorId) {
this.creatorId = creatorId;
}
}

View File

@ -13,20 +13,10 @@ public class UserCriteria {
*/
private String search;
/**
* Getter of search.
*
* @return the search
*/
public String getSearch() {
return search;
}
/**
* Setter of search.
*
* @param search search
*/
public UserCriteria setSearch(String search) {
this.search = search;
return this;

View File

@ -1,7 +1,5 @@
package com.sismics.docs.core.dao.jpa.dto;
import javax.persistence.Id;
import com.sismics.docs.core.constant.PermType;
/**
@ -13,7 +11,6 @@ public class AclDto {
/**
* Acl ID.
*/
@Id
private String id;
/**

View File

@ -1,7 +1,5 @@
package com.sismics.docs.core.dao.jpa.dto;
import javax.persistence.Id;
import com.sismics.docs.core.constant.AuditLogType;
/**
@ -13,9 +11,13 @@ public class AuditLogDto {
/**
* Audit log ID.
*/
@Id
private String id;
/**
* Username.
*/
private String username;
/**
* Entity ID.
*/
@ -49,6 +51,14 @@ public class AuditLogDto {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getEntityId() {
return entityId;
}

View File

@ -1,7 +1,5 @@
package com.sismics.docs.core.dao.jpa.dto;
import javax.persistence.Id;
/**
* Comment DTO.
*
@ -11,7 +9,6 @@ public class CommentDto {
/**
* Comment ID.
*/
@Id
private String id;
/**

View File

@ -0,0 +1,34 @@
package com.sismics.docs.core.dao.jpa.dto;
/**
* Contributor DTO.
*
* @author bgamard
*/
public class ContributorDto {
/**
* Username.
*/
private String username;
/**
* Email.
*/
private String email;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
}

View File

@ -1,7 +1,5 @@
package com.sismics.docs.core.dao.jpa.dto;
import javax.persistence.Id;
/**
* Document DTO.
*
@ -11,7 +9,6 @@ public class DocumentDto {
/**
* Document ID.
*/
@Id
private String id;
/**
@ -24,6 +21,46 @@ public class DocumentDto {
*/
private String description;
/**
* Subject.
*/
private String subject;
/**
* Identifier.
*/
private String identifier;
/**
* Publisher.
*/
private String publisher;
/**
* Format.
*/
private String format;
/**
* Source.
*/
private String source;
/**
* Type.
*/
private String type;
/**
* Coverage.
*/
private String coverage;
/**
* Rights.
*/
private String rights;
/**
* Language.
*/
@ -49,142 +86,130 @@ public class DocumentDto {
*/
private String creator;
/**
* Getter of id.
*
* @return the id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of title.
*
* @return the title
*/
public String getTitle() {
return title;
}
/**
* Setter of title.
*
* @param title title
*/
public void setTitle(String title) {
this.title = title;
}
/**
* Getter of description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Setter of description.
*
* @param description description
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Getter of createTimestamp.
*
* @return the createTimestamp
*/
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
public String getIdentifier() {
return identifier;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public String getPublisher() {
return publisher;
}
public void setPublisher(String publisher) {
this.publisher = publisher;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getCoverage() {
return coverage;
}
public void setCoverage(String coverage) {
this.coverage = coverage;
}
public String getRights() {
return rights;
}
public void setRights(String rights) {
this.rights = rights;
}
public Long getCreateTimestamp() {
return createTimestamp;
}
/**
* Setter of createTimestamp.
*
* @param createTimestamp createTimestamp
*/
public void setCreateTimestamp(Long createTimestamp) {
this.createTimestamp = createTimestamp;
}
/**
* Getter of shared.
*
* @return the shared
*/
public Boolean getShared() {
return shared;
}
/**
* Setter of shared.
*
* @param shared shared
*/
public void setShared(Boolean shared) {
this.shared = shared;
}
/**
* Getter of language.
*
* @return the language
*/
public String getLanguage() {
return language;
}
/**
* Setter of language.
*
* @param language language
*/
public void setLanguage(String language) {
this.language = language;
}
/**
* Getter of fileCount.
* @return fileCount
*/
public Integer getFileCount() {
return fileCount;
}
/**
* Setter of fileCount.
* @param fileCount fileCount
*/
public void setFileCount(Integer fileCount) {
this.fileCount = fileCount;
}
/**
* Getter of creator.
* @return creator
*/
public String getCreator() {
return creator;
}
/**
* Setter of creator.
* @param creator creator
*/
public void setCreator(String creator) {
this.creator = creator;
}

View File

@ -1,7 +1,5 @@
package com.sismics.docs.core.dao.jpa.dto;
import javax.persistence.Id;
/**
* Tag DTO.
*
@ -11,7 +9,6 @@ public class TagDto {
/**
* Tag ID.
*/
@Id
private String id;
/**

View File

@ -37,69 +37,38 @@ public class UserDto {
*/
private Long storageCurrent;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of username.
*
* @return username
*/
public String getUsername() {
return username;
}
/**
* Setter of username.
*
* @param username username
*/
public void setUsername(String username) {
this.username = username;
}
/**
* Getter of email.
*
* @return email
*/
public String getEmail() {
return email;
}
/**
* Setter of email.
*
* @param email email
*/
public void setEmail(String email) {
this.email = email;
}
/**
* Getter of createTimestamp.
*
* @return createTimestamp
*/
public Long getCreateTimestamp() {
return createTimestamp;
}
public void setCreateTimestamp(Long createTimestamp) {
this.createTimestamp = createTimestamp;
}
public Long getStorageQuota() {
return storageQuota;
}
@ -115,13 +84,4 @@ public class UserDto {
public void setStorageCurrent(Long storageCurrent) {
this.storageCurrent = storageCurrent;
}
/**
* Setter of createTimestamp.
*
* @param createTimestamp createTimestamp
*/
public void setCreateTimestamp(Long createTimestamp) {
this.createTimestamp = createTimestamp;
}
}

View File

@ -1,135 +0,0 @@
package com.sismics.docs.core.dao.lucene;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.standard.ClassicAnalyzer;
import org.apache.lucene.analysis.standard.ClassicTokenizer;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.util.StopwordAnalyzerBase;
import org.apache.lucene.util.Version;
import java.io.IOException;
import java.io.Reader;
/**
* Filters {@link StandardTokenizer} with {@link StandardFilter}, {@link
* LowerCaseFilter} and {@link StopFilter}, using a list of
* English stop words.
*
* <a name="version"/>
* <p>You must specify the required {@link Version}
* compatibility when creating StandardAnalyzer:
* <ul>
* <li> As of 3.4, Hiragana and Han characters are no longer wrongly split
* from their combining characters. If you use a previous version number,
* you get the exact broken behavior for backwards compatibility.
* <li> As of 3.1, StandardTokenizer implements Unicode text segmentation,
* and StopFilter correctly handles Unicode 4.0 supplementary characters
* in stopwords. {@link ClassicTokenizer} and {@link ClassicAnalyzer}
* are the pre-3.1 implementations of StandardTokenizer and
* StandardAnalyzer.
* <li> As of 2.9, StopFilter preserves position increments
* <li> As of 2.4, Tokens incorrectly identified as acronyms
* are corrected (see <a href="https://issues.apache.org/jira/browse/LUCENE-1068">LUCENE-1068</a>)
* </ul>
*/
public final class DocsStandardAnalyzer extends StopwordAnalyzerBase {
/** Default maximum allowed token length */
public static final int DEFAULT_MAX_TOKEN_LENGTH = 255;
private int maxTokenLength = DEFAULT_MAX_TOKEN_LENGTH;
/** An unmodifiable set containing some common English words that are usually not
useful for searching. */
public static final CharArraySet STOP_WORDS_SET = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
/** Builds an analyzer with the given stop words.
* @param matchVersion Lucene version to match See {@link
* <a href="#version">above</a>}
* @param stopWords stop words */
public DocsStandardAnalyzer(Version matchVersion, CharArraySet stopWords) {
super(matchVersion, stopWords);
}
/** Builds an analyzer with the default stop words ({@link
* #STOP_WORDS_SET}).
* @param matchVersion Lucene version to match See {@link
* <a href="#version">above</a>}
*/
public DocsStandardAnalyzer(Version matchVersion) {
this(matchVersion, STOP_WORDS_SET);
}
/** Builds an analyzer with the stop words from the given reader.
* @see WordlistLoader#getWordSet(Reader, Version)
* @param matchVersion Lucene version to match See {@link
* <a href="#version">above</a>}
* @param stopwords Reader to read stop words from */
public DocsStandardAnalyzer(Version matchVersion, Reader stopwords) throws IOException {
this(matchVersion, loadStopwordSet(stopwords, matchVersion));
}
/**
* Set maximum allowed token length. If a token is seen
* that exceeds this length then it is discarded. This
* setting only takes effect the next time tokenStream or
* tokenStream is called.
*/
public void setMaxTokenLength(int length) {
maxTokenLength = length;
}
/**
* @see #setMaxTokenLength
*/
public int getMaxTokenLength() {
return maxTokenLength;
}
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
final StandardTokenizer src = new StandardTokenizer(matchVersion, reader);
src.setMaxTokenLength(maxTokenLength);
TokenStream tok = new StandardFilter(matchVersion, src);
tok = new LowerCaseFilter(matchVersion, tok);
tok = new StopFilter(matchVersion, tok, stopwords);
return new TokenStreamComponents(src, tok) {
@Override
protected void setReader(final Reader reader) throws IOException {
src.setMaxTokenLength(DocsStandardAnalyzer.this.maxTokenLength);
super.setReader(reader);
}
};
}
@Override
protected Reader initReader(String fieldName, Reader reader) {
if (fieldName.equals("title") || fieldName.equals("description")) {
return new HTMLStripCharFilter(super.initReader(fieldName, reader));
}
return super.initReader(fieldName, reader);
}
}

View File

@ -6,6 +6,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
@ -19,7 +20,6 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.Version;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;
@ -152,14 +152,23 @@ public class LuceneDao {
fullSearchQuery = "\"" + QueryParserUtil.escape(fullSearchQuery) + "\"";
// Build search query
StandardQueryParser qpHelper = new StandardQueryParser(new DocsStandardAnalyzer(Version.LUCENE_42));
StandardQueryParser qpHelper = new StandardQueryParser(new StandardAnalyzer());
qpHelper.setPhraseSlop(100000); // PhraseQuery adds terms
// Search on documents and files
BooleanQuery query = new BooleanQuery();
query.add(qpHelper.parse(searchQuery, "title"), Occur.SHOULD);
query.add(qpHelper.parse(searchQuery, "description"), Occur.SHOULD);
query.add(qpHelper.parse(fullSearchQuery, "content"), Occur.SHOULD);
BooleanQuery query = new BooleanQuery.Builder()
.add(qpHelper.parse(searchQuery, "title"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "description"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "subject"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "identifier"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "publisher"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "format"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "source"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "type"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "coverage"), Occur.SHOULD)
.add(qpHelper.parse(searchQuery, "rights"), Occur.SHOULD)
.add(qpHelper.parse(fullSearchQuery, "content"), Occur.SHOULD)
.build();
// Search
DirectoryReader directoryReader = AppContext.getInstance().getIndexingService().getDirectoryReader();
@ -175,7 +184,7 @@ public class LuceneDao {
// Extract document IDs
for (int i = 0; i < docs.length; i++) {
org.apache.lucene.document.Document document = searcher.doc(docs[i].doc);
String type = document.get("type");
String type = document.get("doctype");
String documentId = null;
if (type.equals("document")) {
documentId = document.get("id");
@ -197,13 +206,35 @@ public class LuceneDao {
private org.apache.lucene.document.Document getDocumentFromDocument(Document document) {
org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
luceneDocument.add(new StringField("id", document.getId(), Field.Store.YES));
luceneDocument.add(new StringField("type", "document", Field.Store.YES));
if (document.getTitle() != null) {
luceneDocument.add(new TextField("title", document.getTitle(), Field.Store.NO));
}
luceneDocument.add(new StringField("doctype", "document", Field.Store.YES));
luceneDocument.add(new TextField("title", document.getTitle(), Field.Store.NO));
if (document.getDescription() != null) {
luceneDocument.add(new TextField("description", document.getDescription(), Field.Store.NO));
}
if (document.getSubject() != null) {
luceneDocument.add(new TextField("subject", document.getSubject(), Field.Store.NO));
}
if (document.getIdentifier() != null) {
luceneDocument.add(new TextField("identifier", document.getIdentifier(), Field.Store.NO));
}
if (document.getPublisher() != null) {
luceneDocument.add(new TextField("publisher", document.getPublisher(), Field.Store.NO));
}
if (document.getFormat() != null) {
luceneDocument.add(new TextField("format", document.getFormat(), Field.Store.NO));
}
if (document.getSource() != null) {
luceneDocument.add(new TextField("source", document.getSource(), Field.Store.NO));
}
if (document.getType() != null) {
luceneDocument.add(new TextField("type", document.getType(), Field.Store.NO));
}
if (document.getCoverage() != null) {
luceneDocument.add(new TextField("coverage", document.getCoverage(), Field.Store.NO));
}
if (document.getRights() != null) {
luceneDocument.add(new TextField("rights", document.getRights(), Field.Store.NO));
}
return luceneDocument;
}
@ -218,7 +249,7 @@ public class LuceneDao {
private org.apache.lucene.document.Document getDocumentFromFile(File file, Document document) {
org.apache.lucene.document.Document luceneDocument = new org.apache.lucene.document.Document();
luceneDocument.add(new StringField("id", file.getId(), Field.Store.YES));
luceneDocument.add(new StringField("type", "file", Field.Store.YES));
luceneDocument.add(new StringField("doctype", "file", Field.Store.YES));
luceneDocument.add(new StringField("document_id", file.getDocumentId(), Field.Store.YES));
if (file.getContent() != null) {
luceneDocument.add(new TextField("content", file.getContent(), Field.Store.NO));

View File

@ -8,7 +8,7 @@ import com.sismics.docs.core.model.jpa.Document;
*
* @author bgamard
*/
public class DocumentCreatedAsyncEvent {
public class DocumentCreatedAsyncEvent extends UserEvent {
/**
* Created document.
*/

View File

@ -8,7 +8,7 @@ import com.sismics.docs.core.model.jpa.Document;
*
* @author bgamard
*/
public class DocumentDeletedAsyncEvent {
public class DocumentDeletedAsyncEvent extends UserEvent {
/**
* Deleted document.
*/

View File

@ -8,7 +8,7 @@ import com.sismics.docs.core.model.jpa.Document;
*
* @author bgamard
*/
public class DocumentUpdatedAsyncEvent {
public class DocumentUpdatedAsyncEvent extends UserEvent {
/**
* Updated document.
*/

View File

@ -11,7 +11,7 @@ import com.sismics.docs.core.model.jpa.File;
*
* @author bgamard
*/
public class FileCreatedAsyncEvent {
public class FileCreatedAsyncEvent extends UserEvent {
/**
* Created file.
*/

View File

@ -8,7 +8,7 @@ import com.sismics.docs.core.model.jpa.File;
*
* @author bgamard
*/
public class FileDeletedAsyncEvent {
public class FileDeletedAsyncEvent extends UserEvent {
/**
* Deleted file.
*/

View File

@ -0,0 +1,21 @@
package com.sismics.docs.core.event;
/**
* Event fired by a user.
*
* @author bgamard
*/
public abstract class UserEvent {
/**
* User ID who fired the event.
*/
private String userId;
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
}
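Reviewer note: a sketch (not part of the diff) of how a caller is expected to attach the acting user to an async event now that the document and file events extend UserEvent. The setDocument(...) setter and the AppContext async event bus are assumed from the existing code base and are not shown in this commit.

import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Document;

public class FireEventSketch {
    // Posts a document-created event carrying the acting user, so the async
    // listener can record the first contributor and update the Lucene index.
    public static void fire(Document document, String userId) {
        DocumentCreatedAsyncEvent event = new DocumentCreatedAsyncEvent();
        event.setDocument(document);          // assumed setter on the event
        event.setUserId(userId);              // new in this commit (inherited from UserEvent)
        AppContext.getInstance().getAsyncEventBus().post(event);   // assumed event bus accessor
    }
}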

View File

@ -4,8 +4,11 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.util.TransactionUtil;
/**
* Listener on document created.
@ -21,17 +24,29 @@ public class DocumentCreatedAsyncListener {
/**
* Document created.
*
* @param documentCreatedAsyncEvent Document created event
* @param event Document created event
* @throws Exception
*/
@Subscribe
public void on(final DocumentCreatedAsyncEvent documentCreatedAsyncEvent) throws Exception {
public void on(final DocumentCreatedAsyncEvent event) throws Exception {
if (log.isInfoEnabled()) {
log.info("Document created event: " + documentCreatedAsyncEvent.toString());
log.info("Document created event: " + event.toString());
}
TransactionUtil.handle(new Runnable() {
@Override
public void run() {
// Add the first contributor (the creator of the document)
ContributorDao contributorDao = new ContributorDao();
Contributor contributor = new Contributor();
contributor.setDocumentId(event.getDocument().getId());
contributor.setUserId(event.getUserId());
contributorDao.create(contributor);
}
});
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.createDocument(documentCreatedAsyncEvent.getDocument());
luceneDao.createDocument(event.getDocument());
}
}

View File

@ -1,11 +1,16 @@
package com.sismics.docs.core.listener.async;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.lucene.LuceneDao;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.util.TransactionUtil;
/**
* Listener on document updated.
@ -21,17 +26,40 @@ public class DocumentUpdatedAsyncListener {
/**
* Document updated.
*
* @param documentUpdatedAsyncEvent Document updated event
* @param event Document updated event
* @throws Exception
*/
@Subscribe
public void on(final DocumentUpdatedAsyncEvent documentUpdatedAsyncEvent) throws Exception {
public void on(final DocumentUpdatedAsyncEvent event) throws Exception {
if (log.isInfoEnabled()) {
log.info("Document updated event: " + documentUpdatedAsyncEvent.toString());
log.info("Document updated event: " + event.toString());
}
// Update contributors list
TransactionUtil.handle(new Runnable() {
@Override
public void run() {
ContributorDao contributorDao = new ContributorDao();
List<Contributor> contributorList = contributorDao.findByDocumentId(event.getDocument().getId());
// Check if the user firing this event is not already a contributor
for (Contributor contributor : contributorList) {
if (contributor.getUserId().equals(event.getUserId())) {
// The current user is already a contributor on this document, don't do anything
return;
}
}
// Add a new contributor
Contributor contributor = new Contributor();
contributor.setDocumentId(event.getDocument().getId());
contributor.setUserId(event.getUserId());
contributorDao.create(contributor);
}
});
// Update Lucene index
LuceneDao luceneDao = new LuceneDao();
luceneDao.updateDocument(documentUpdatedAsyncEvent.getDocument());
luceneDao.updateDocument(event.getDocument());
}
}

View File

@ -27,6 +27,12 @@ public class AuditLog {
@Column(name = "LOG_ID_C", length = 36)
private String id;
/**
* User ID.
*/
@Column(name = "LOG_IDUSER_C", nullable = false, length = 36)
private String userId;
/**
* Entity ID.
*/
@ -58,110 +64,58 @@ public class AuditLog {
@Column(name = "LOG_CREATEDATE_D", nullable = false)
private Date createDate;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of entityId.
*
* @return entityId
*/
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getEntityId() {
return entityId;
}
/**
* Setter of entityId.
*
* @param entityId entityId
*/
public void setEntityId(String entityId) {
this.entityId = entityId;
}
/**
* Getter of entityClass.
*
* @return entityClass
*/
public String getEntityClass() {
return entityClass;
}
/**
* Setter of entityClass.
*
* @param entityClass entityClass
*/
public void setEntityClass(String entityClass) {
this.entityClass = entityClass;
}
/**
* Getter of message.
*
* @return message
*/
public String getMessage() {
return message;
}
/**
* Setter of message.
*
* @param message message
*/
public void setMessage(String message) {
this.message = message;
}
/**
* Getter of type.
*
* @return type
*/
public AuditLogType getType() {
return type;
}
/**
* Setter of type.
*
* @param type type
*/
public void setType(AuditLogType type) {
this.type = type;
}
/**
* Getter of createDate.
*
* @return createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}

View File

@ -60,124 +60,58 @@ public class AuthenticationToken {
@Column(name = "AUT_LASTCONNECTIONDATE_D")
private Date lastConnectionDate;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of userId.
*
* @return userId
*/
public String getUserId() {
return userId;
}
/**
* Setter of userId.
*
* @param userId userId
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* Getter of longLasted.
*
* @return longLasted
*/
public boolean isLongLasted() {
return longLasted;
}
/**
* Setter of longLasted.
*
* @param longLasted longLasted
*/
public void setLongLasted(boolean longLasted) {
this.longLasted = longLasted;
}
/**
* Getter of ip.
* @return ip
*/
public String getIp() {
return ip;
}
/**
* Setter of ip.
* @param ip ip
*/
public void setIp(String ip) {
this.ip = ip;
}
/**
* Getter of userAgent.
* @return userAgent
*/
public String getUserAgent() {
return userAgent;
}
/**
* Setter of userAgent.
* @param userAgent userAgent
*/
public void setUserAgent(String userAgent) {
this.userAgent = userAgent;
}
/**
* Getter of creationDate.
*
* @return creationDate
*/
public Date getCreationDate() {
return creationDate;
}
/**
* Setter of creationDate.
*
* @param creationDate creationDate
*/
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
/**
* Getter of lastConnectionDate.
*
* @return lastConnectionDate
*/
public Date getLastConnectionDate() {
return lastConnectionDate;
}
/**
* Setter of lastConnectionDate.
*
* @param lastConnectionDate lastConnectionDate
*/
public void setLastConnectionDate(Date lastConnectionDate) {
this.lastConnectionDate = lastConnectionDate;
}

View File

@ -22,20 +22,10 @@ public class BaseFunction {
@Column(name = "BAF_ID_C", length = 10)
private String id;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}

View File

@ -54,111 +54,51 @@ public class Comment implements Loggable {
@Column(name = "COM_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return the id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of documentId.
*
* @return the documentId
*/
public String getDocumentId() {
return documentId;
}
/**
* Setter of documentId.
*
* @param documentId documentId
*/
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
/**
* Getter of createDate.
*
* @return the createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of deleteDate.
*
* @return the deleteDate
*/
@Override
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}
/**
* Getter of content.
*
* @return the content
*/
public String getContent() {
return content;
}
/**
* Setter of content.
*
* @param content content
*/
public void setContent(String content) {
this.content = content;
}
/**
* Getter of userId.
*
* @return the userId
*/
public String getUserId() {
return userId;
}
/**
* Setter of userId.
*
* @param userId userId
*/
public void setUserId(String userId) {
this.userId = userId;
}

View File

@ -32,38 +32,18 @@ public class Config {
@Column(name = "CFG_VALUE_C", length = 250)
private String value;
/**
* Getter of id.
*
* @return id
*/
public ConfigType getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(ConfigType id) {
this.id = id;
}
/**
* Getter of value.
*
* @return value
*/
public String getValue() {
return value;
}
/**
* Setter of value.
*
* @param value value
*/
public void setValue(String value) {
this.value = value;
}

View File

@ -0,0 +1,69 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;
/**
* Contributor entity.
*
* @author bgamard
*/
@Entity
@Table(name = "T_Contributor")
public class Contributor {
/**
* Contributor ID.
*/
@Id
@Column(name = "CTR_ID_C", length = 36)
private String id;
/**
* Document ID.
*/
@Column(name = "CTR_IDDOC_C", length = 36, nullable = false)
private String documentId;
/**
* User ID.
*/
@Column(name = "CTR_IDUSER_C", length = 36, nullable = false)
private String userId;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDocumentId() {
return documentId;
}
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("id", id)
.add("userId", userId)
.add("documentId", documentId)
.toString();
}
}

View File

@ -39,7 +39,7 @@ public class Document implements Loggable {
/**
* Title.
*/
@Column(name = "DOC_TITLE_C", length = 100)
@Column(name = "DOC_TITLE_C", nullable = false, length = 100)
private String title;
/**
@ -48,6 +48,54 @@ public class Document implements Loggable {
@Column(name = "DOC_DESCRIPTION_C", length = 4000)
private String description;
/**
* Subject.
*/
@Column(name = "DOC_SUBJECT_C", length = 500)
private String subject;
/**
* Identifier.
*/
@Column(name = "DOC_IDENTIFIER_C", length = 500)
private String identifier;
/**
* Publisher.
*/
@Column(name = "DOC_PUBLISHER_C", length = 500)
private String publisher;
/**
* Format.
*/
@Column(name = "DOC_FORMAT_C", length = 500)
private String format;
/**
* Source.
*/
@Column(name = "DOC_SOURCE_C", length = 500)
private String source;
/**
* Type.
*/
@Column(name = "DOC_TYPE_C", length = 100)
private String type;
/**
* Coverage.
*/
@Column(name = "DOC_COVERAGE_C", length = 100)
private String coverage;
/**
* Rights.
*/
@Column(name = "DOC_RIGHTS_C", length = 100)
private String rights;
/**
* Creation date.
*/
@ -60,129 +108,123 @@ public class Document implements Loggable {
@Column(name = "DOC_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return the id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of language.
*
* @return the language
*/
public String getLanguage() {
return language;
}
/**
* Setter of language.
*
* @param language language
*/
public void setLanguage(String language) {
this.language = language;
}
/**
* Getter of userId.
*
* @return the userId
*/
public String getUserId() {
return userId;
}
/**
* Setter of userId.
*
* @param userId userId
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* Getter of title.
*
* @return the title
*/
public String getTitle() {
return title;
}
/**
* Setter of title.
*
* @param title title
*/
public void setTitle(String title) {
this.title = title;
}
/**
* Getter of description.
*
* @return the description
*/
public String getDescription() {
return description;
}
/**
* Setter of description.
*
* @param description description
*/
public void setDescription(String description) {
this.description = description;
}
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
public String getIdentifier() {
return identifier;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public String getPublisher() {
return publisher;
}
public void setPublisher(String publisher) {
this.publisher = publisher;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getCoverage() {
return coverage;
}
public void setCoverage(String coverage) {
this.coverage = coverage;
}
public String getRights() {
return rights;
}
public void setRights(String rights) {
this.rights = rights;
}
/**
* Getter of createDate.
*
* @return the createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of deleteDate.
*
* @return the deleteDate
*/
@Override
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}
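
The new columns cover the remaining Dublin Core elements on top of title, description, language and creator. The fragment below illustrates how a caller, for example inside DocumentResource.add() further down in this diff, could fill them in using the setters added above; all values are made up.

// Illustrative values only; this fragment belongs inside a resource or test method.
Document document = new Document();
document.setUserId(principal.getId());   // principal comes from the calling resource
document.setTitle("Quarterly report");
document.setDescription("Scanned copy of the Q1 report");
document.setSubject("Finance");
document.setIdentifier("REP-2016-Q1");
document.setPublisher("Sismics");
document.setFormat("application/pdf");
document.setSource("Paper archive");
document.setType("Text");                // one of the 'type' vocabulary values seeded below
document.setCoverage("France");          // the 'coverage' vocabulary seeded below lists countries
document.setRights("Public Domain");
document.setLanguage("eng");
document.setCreateDate(new Date());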

View File

@ -48,74 +48,34 @@ public class DocumentTag implements Serializable {
@Column(name = "DOT_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of documentId.
*
* @return the documentId
*/
public String getDocumentId() {
return documentId;
}
/**
* Setter of documentId.
*
* @param documentId documentId
*/
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
/**
* Getter of tagId.
*
* @return the tagId
*/
public String getTagId() {
return tagId;
}
/**
* Setter of tagId.
*
* @param tagId tagId
*/
public void setTagId(String tagId) {
this.tagId = tagId;
}
/**
* Getter of deleteDate.
*
* @return the deleteDate
*/
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}

View File

@ -42,74 +42,34 @@ public class Role {
@Column(name = "ROL_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of name.
*
* @return name
*/
public String getName() {
return name;
}
/**
* Setter of name.
*
* @param name name
*/
public void setName(String name) {
this.name = name;
}
/**
* Getter of createDate.
*
* @return createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of deleteDate.
*
* @return deleteDate
*/
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}

View File

@ -48,92 +48,42 @@ public class RoleBaseFunction {
@Column(name = "RBF_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of roleId.
*
* @return roleId
*/
public String getRoleId() {
return roleId;
}
/**
* Setter of roleId.
*
* @param roleId roleId
*/
public void setRoleId(String roleId) {
this.roleId = roleId;
}
/**
* Getter of baseFunctionId.
*
* @return baseFunctionId
*/
public String getBaseFunctionId() {
return baseFunctionId;
}
/**
* Setter of baseFunctionId.
*
* @param baseFunctionId baseFunctionId
*/
public void setBaseFunctionId(String baseFunctionId) {
this.baseFunctionId = baseFunctionId;
}
/**
* Getter of createDate.
*
* @return createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of deleteDate.
*
* @return deleteDate
*/
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}

View File

@ -40,74 +40,34 @@ public class Share {
@Column(name = "SHA_DELETEDATE_D")
private Date deleteDate;
/**
* Getter of id.
*
* @return the id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Setter of name.
*
* @param name name
*/
public void setName(String name) {
this.name = name;
}
/**
* Getter of createDate.
*
* @return the createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of deleteDate.
*
* @return the deleteDate
*/
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}

View File

@ -60,129 +60,59 @@ public class Tag implements Loggable {
@Column(name = "TAG_COLOR_C", nullable = false, length = 7)
private String color;
/**
* Getter of id.
*
* @return id
*/
public String getId() {
return id;
}
/**
* Setter of id.
*
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Getter of userId.
*
* @return the userId
*/
public String getUserId() {
return userId;
}
/**
* Setter of userId.
*
* @param userId userId
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* Getter of name.
*
* @return name
*/
public String getName() {
return name;
}
/**
* Setter of name.
*
* @param name name
*/
public void setName(String name) {
this.name = name;
}
/**
* Getter of createDate.
*
* @return createDate
*/
public Date getCreateDate() {
return createDate;
}
/**
* Setter of createDate.
*
* @param createDate createDate
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* Getter of color.
*
* @return the color
*/
public String getColor() {
return color;
}
/**
* Setter of color.
*
* @param color color
*/
public void setColor(String color) {
this.color = color;
}
/**
* Getter of deleteDate.
*
* @return deleteDate
*/
@Override
public Date getDeleteDate() {
return deleteDate;
}
/**
* Setter of deleteDate.
*
* @param deleteDate deleteDate
*/
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}
/**
* Getter of parentId.
*
* @return parentId
*/
public String getParentId() {
return parentId;
}
/**
* Setter of parentId.
*
* @param parentId parentId
*/
public void setParentId(String parentId) {
this.parentId = parentId;
}

View File

@ -0,0 +1,84 @@
package com.sismics.docs.core.model.jpa;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.google.common.base.MoreObjects;
/**
* Vocabulary entry entity.
*
* @author bgamard
*/
@Entity
@Table(name = "T_VOCABULARY")
public class Vocabulary {
/**
* Vocabulary ID.
*/
@Id
@Column(name = "VOC_ID_C", nullable = false, length = 36)
private String id;
/**
* Vocabulary name.
*/
@Column(name = "VOC_NAME_C", nullable = false, length = 50)
private String name;
/**
* Vocabulary value.
*/
@Column(name = "VOC_VALUE_C", nullable = false, length = 500)
private String value;
/**
* Vocabulary order.
*/
@Column(name = "VOC_ORDER_N")
private int order;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public int getOrder() {
return order;
}
public void setOrder(int order) {
this.order = order;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("id", id)
.add("name", name)
.add("value", value)
.add("order", order)
.toString();
}
}
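
Entries are grouped by a shared name (for example 'type', 'coverage' or 'rights', seeded by the migration script below) and ordered by VOC_ORDER_N. A hedged sketch of reading one group back, assuming the thread-local EntityManager pattern used elsewhere in the project; the actual VocabularyDao is not part of this excerpt and may phrase the query differently.

// Hypothetical lookup; sorts client-side on the order column.
EntityManager em = ThreadLocalContext.get().getEntityManager();
List<Vocabulary> entries = em.createQuery(
        "select v from Vocabulary v where v.name = :name", Vocabulary.class)
        .setParameter("name", "coverage")
        .getResultList();
Collections.sort(entries, new Comparator<Vocabulary>() {
    @Override
    public int compare(Vocabulary a, Vocabulary b) {
        return Integer.compare(a.getOrder(), b.getOrder());
    }
});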

View File

@ -4,11 +4,15 @@ import java.io.IOException;
import java.nio.file.Path;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.CheckIndex.Status;
import org.apache.lucene.index.CheckIndex.Status.SegmentInfoStatus;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -59,11 +63,37 @@ public class IndexingService extends AbstractScheduledService {
Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
log.info("Using file Lucene storage: {}", luceneDirectory);
try {
directory = new SimpleFSDirectory(luceneDirectory.toFile(), new SimpleFSLockFactory());
directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
} catch (IOException e) {
log.error("Error initializing Lucene index", e);
}
}
// Check index version and rebuild it if necessary
try {
if (DirectoryReader.indexExists(directory)) {
log.info("Checking index health and version");
try (CheckIndex checkIndex = new CheckIndex(directory)) {
Status status = checkIndex.checkIndex();
if (status.clean) {
for (SegmentInfoStatus segmentInfo : status.segmentInfos) {
if (!segmentInfo.version.onOrAfter(Version.LATEST)) {
log.info("Index is old (" + segmentInfo.version + "), rebuilding");
RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
break;
}
}
} else {
log.info("Index is dirty, rebuilding");
RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
}
}
}
} catch (Exception e) {
log.error("Error checking index", e);
}
}
@Override
@ -127,10 +157,10 @@ public class IndexingService extends AbstractScheduledService {
*/
public DirectoryReader getDirectoryReader() {
if (directoryReader == null) {
if (!DirectoryReader.indexExists(directory)) {
return null;
}
try {
if (!DirectoryReader.indexExists(directory)) {
return null;
}
directoryReader = DirectoryReader.open(directory);
} catch (IOException e) {
log.error("Error creating the directory reader", e);

View File

@ -20,7 +20,7 @@ public class AuditLogUtil {
* @param entity Entity
* @param type Audit log type
*/
public static void create(Loggable loggable, AuditLogType type) {
public static void create(Loggable loggable, AuditLogType type, String userId) {
// Get the entity ID
EntityManager em = ThreadLocalContext.get().getEntityManager();
String entityId = (String) em.getEntityManagerFactory().getPersistenceUnitUtil().getIdentifier(loggable);
@ -28,6 +28,7 @@ public class AuditLogUtil {
// Create the audit log
AuditLogDao auditLogDao = new AuditLogDao();
AuditLog auditLog = new AuditLog();
auditLog.setUserId(userId);
auditLog.setEntityId(entityId);
auditLog.setEntityClass(loggable.getClass().getSimpleName());
auditLog.setType(type);

View File

@ -1,16 +1,16 @@
package com.sismics.docs.core.util;
import com.sismics.docs.core.dao.lucene.DocsStandardAnalyzer;
import com.sismics.docs.core.model.context.AppContext;
import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import com.sismics.docs.core.model.context.AppContext;
/**
* Lucene utils.
@ -31,7 +31,10 @@ public class LuceneUtil {
*/
public static void handle(LuceneRunnable runnable) {
// Standard analyzer
IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_42, new DocsStandardAnalyzer(Version.LUCENE_42));
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
// Automatically commit when closing this writer
config.setCommitOnClose(true);
// Merge sequentially, because Lucene writing is already done asynchronously
config.setMergeScheduler(new SerialMergeScheduler());
@ -45,15 +48,6 @@ public class LuceneUtil {
log.error("Cannot create IndexWriter", e);
}
// Unlock index if needed
try {
if (IndexWriter.isLocked(directory)) {
IndexWriter.unlock(directory);
}
} catch (IOException e) {
log.error("Cannot unlock Lucene directory", e);
}
try {
runnable.run(indexWriter);
} catch (Exception e) {
@ -68,7 +62,7 @@ public class LuceneUtil {
try {
indexWriter.close();
} catch (IOException e) {
log.error("Cannot close IndexWriter", e);
log.error("Cannot commit and close IndexWriter", e);
}
}
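
The deleted unlock block is no longer needed because Lucene 5 removed IndexWriter.unlock() and stale write locks stopped being a practical concern, while setCommitOnClose(true) makes close() flush pending changes. Below is a self-contained sketch of the new writer lifecycle against an in-memory directory; the field name and document content are made up.

import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class LuceneWriterSketch {
    public static void main(String[] args) throws IOException {
        Directory directory = new RAMDirectory();      // in-memory, for illustration only
        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
        config.setCommitOnClose(true);                 // close() now commits pending changes
        config.setMergeScheduler(new SerialMergeScheduler());
        try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
            Document luceneDocument = new Document();
            luceneDocument.add(new TextField("title", "hello world", Field.Store.YES));
            indexWriter.addDocument(luceneDocument);
        } // no manual IndexWriter.unlock() step anymore
        directory.close();
    }
}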

View File

@ -89,7 +89,7 @@ public abstract class DbOpenHelper {
oldVersion = Integer.parseInt(oldVersionStr);
}
} catch (Exception e) {
if (e.getMessage().contains("object not found")) {
if (e.getMessage().contains("not found")) {
log.info("Unable to get database version: Table T_CONFIG not found");
} else {
log.error("Unable to get database version", e);
@ -120,6 +120,7 @@ public abstract class DbOpenHelper {
log.error("Unable to complete schema update", e);
} finally {
try {
connection.commit();
if (stmt != null) {
stmt.close();
stmt = null;

View File

@ -1 +1 @@
db.version=5
db.version=6

View File

@ -0,0 +1,287 @@
alter table T_DOCUMENT add column DOC_SUBJECT_C varchar(500);
alter table T_DOCUMENT add column DOC_IDENTIFIER_C varchar(500);
alter table T_DOCUMENT add column DOC_PUBLISHER_C varchar(500);
alter table T_DOCUMENT add column DOC_FORMAT_C varchar(500);
alter table T_DOCUMENT add column DOC_SOURCE_C varchar(500);
alter table T_DOCUMENT add column DOC_TYPE_C varchar(500);
alter table T_DOCUMENT add column DOC_COVERAGE_C varchar(500);
alter table T_DOCUMENT add column DOC_RIGHTS_C varchar(500);
alter table T_AUDIT_LOG add column LOG_IDUSER_C varchar(36) not null default 'admin';
create memory table T_VOCABULARY ( VOC_ID_C varchar(36) not null, VOC_NAME_C varchar(50) not null, VOC_VALUE_C varchar(500) not null, VOC_ORDER_N int not null, primary key (VOC_ID_C) );
create cached table T_CONTRIBUTOR ( CTR_ID_C varchar(36) not null, CTR_IDUSER_C varchar(36) not null, CTR_IDDOC_C varchar(36) not null, primary key (CTR_ID_C) );
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-collection', 'type', 'Collection', 0);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-dataset', 'type', 'Dataset', 1);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-event', 'type', 'Event', 2);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-image', 'type', 'Image', 3);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-interactive-resource', 'type', 'Interactive Resource', 4);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-moving-image', 'type', 'Moving Image', 5);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-physical-object', 'type', 'Physical Object', 6);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-service', 'type', 'Service', 7);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-software', 'type', 'Software', 8);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-sound', 'type', 'Sound', 9);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-still-image', 'type', 'Still Image', 10);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('type-text', 'type', 'Text', 11);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-afg', 'coverage', 'Afghanistan', 0);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ala', 'coverage', 'Åland Islands', 1);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-alb', 'coverage', 'Albania', 2);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-dza', 'coverage', 'Algeria', 3);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-asm', 'coverage', 'American Samoa', 4);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-and', 'coverage', 'Andorra', 5);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ago', 'coverage', 'Angola', 6);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-aia', 'coverage', 'Anguilla', 7);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ata', 'coverage', 'Antarctica', 8);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-atg', 'coverage', 'Antigua and Barbuda', 9);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-arg', 'coverage', 'Argentina', 10);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-arm', 'coverage', 'Armenia', 11);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-abw', 'coverage', 'Aruba', 12);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-aus', 'coverage', 'Australia', 13);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-aut', 'coverage', 'Austria', 14);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-aze', 'coverage', 'Azerbaijan', 15);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bhs', 'coverage', 'Bahamas', 16);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bhr', 'coverage', 'Bahrain', 17);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bgd', 'coverage', 'Bangladesh', 18);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-brb', 'coverage', 'Barbados', 19);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-blr', 'coverage', 'Belarus', 20);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bel', 'coverage', 'Belgium', 21);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-blz', 'coverage', 'Belize', 22);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ben', 'coverage', 'Benin', 23);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bmu', 'coverage', 'Bermuda', 24);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-btn', 'coverage', 'Bhutan', 25);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bol', 'coverage', 'Bolivia (Plurinational State of)', 26);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bes', 'coverage', 'Bonaire, Sint Eustatius and Saba', 27);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bih', 'coverage', 'Bosnia and Herzegovina', 28);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bwa', 'coverage', 'Botswana', 29);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bvt', 'coverage', 'Bouvet Island', 30);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bra', 'coverage', 'Brazil', 31);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-iot', 'coverage', 'British Indian Ocean Territory', 32);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-brn', 'coverage', 'Brunei Darussalam', 33);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bgr', 'coverage', 'Bulgaria', 34);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bfa', 'coverage', 'Burkina Faso', 35);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-bdi', 'coverage', 'Burundi', 36);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cpv', 'coverage', 'Cabo Verde', 37);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-khm', 'coverage', 'Cambodia', 38);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cmr', 'coverage', 'Cameroon', 39);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-can', 'coverage', 'Canada', 40);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cym', 'coverage', 'Cayman Islands', 41);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-caf', 'coverage', 'Central African Republic', 42);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tcd', 'coverage', 'Chad', 43);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-chl', 'coverage', 'Chile', 44);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-chn', 'coverage', 'China', 45);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cxr', 'coverage', 'Christmas Island', 46);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cck', 'coverage', 'Cocos (Keeling) Islands', 47);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-col', 'coverage', 'Colombia', 48);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-com', 'coverage', 'Comoros', 49);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cog', 'coverage', 'Congo', 50);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cod', 'coverage', 'Congo (Democratic Republic of the)', 51);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cok', 'coverage', 'Cook Islands', 52);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cri', 'coverage', 'Costa Rica', 53);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-civ', 'coverage', 'Côte d''Ivoire', 54);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hrv', 'coverage', 'Croatia', 55);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cub', 'coverage', 'Cuba', 56);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cuw', 'coverage', 'Curaçao', 57);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cyp', 'coverage', 'Cyprus', 58);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-cze', 'coverage', 'Czech Republic', 59);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-dnk', 'coverage', 'Denmark', 60);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-dji', 'coverage', 'Djibouti', 61);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-dma', 'coverage', 'Dominica', 62);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-dom', 'coverage', 'Dominican Republic', 63);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ecu', 'coverage', 'Ecuador', 64);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-egy', 'coverage', 'Egypt', 65);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-slv', 'coverage', 'El Salvador', 66);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gnq', 'coverage', 'Equatorial Guinea', 67);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-eri', 'coverage', 'Eritrea', 68);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-est', 'coverage', 'Estonia', 69);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-eth', 'coverage', 'Ethiopia', 70);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-flk', 'coverage', 'Falkland Islands (Malvinas)', 71);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-fro', 'coverage', 'Faroe Islands', 72);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-fji', 'coverage', 'Fiji', 73);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-fin', 'coverage', 'Finland', 74);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-fra', 'coverage', 'France', 75);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-guf', 'coverage', 'French Guiana', 76);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pyf', 'coverage', 'French Polynesia', 77);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-atf', 'coverage', 'French Southern Territories', 78);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gab', 'coverage', 'Gabon', 79);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gmb', 'coverage', 'Gambia', 80);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-geo', 'coverage', 'Georgia', 81);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-deu', 'coverage', 'Germany', 82);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gha', 'coverage', 'Ghana', 83);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gib', 'coverage', 'Gibraltar', 84);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-grc', 'coverage', 'Greece', 85);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-grl', 'coverage', 'Greenland', 86);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-grd', 'coverage', 'Grenada', 87);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-glp', 'coverage', 'Guadeloupe', 88);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gum', 'coverage', 'Guam', 89);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gtm', 'coverage', 'Guatemala', 90);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ggy', 'coverage', 'Guernsey', 91);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gin', 'coverage', 'Guinea', 92);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gnb', 'coverage', 'Guinea-Bissau', 93);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-guy', 'coverage', 'Guyana', 94);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hti', 'coverage', 'Haiti', 95);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hmd', 'coverage', 'Heard Island and McDonald Islands', 96);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vat', 'coverage', 'Holy See', 97);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hnd', 'coverage', 'Honduras', 98);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hkg', 'coverage', 'Hong Kong', 99);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-hun', 'coverage', 'Hungary', 100);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-isl', 'coverage', 'Iceland', 101);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ind', 'coverage', 'India', 102);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-idn', 'coverage', 'Indonesia', 103);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-irn', 'coverage', 'Iran (Islamic Republic of)', 104);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-irq', 'coverage', 'Iraq', 105);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-irl', 'coverage', 'Ireland', 106);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-imn', 'coverage', 'Isle of Man', 107);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-isr', 'coverage', 'Israel', 108);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ita', 'coverage', 'Italy', 109);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-jam', 'coverage', 'Jamaica', 110);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-jpn', 'coverage', 'Japan', 111);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-jey', 'coverage', 'Jersey', 112);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-jor', 'coverage', 'Jordan', 113);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kaz', 'coverage', 'Kazakhstan', 114);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ken', 'coverage', 'Kenya', 115);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kir', 'coverage', 'Kiribati', 116);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-prk', 'coverage', 'Korea (Democratic People''s Republic of)', 117);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kor', 'coverage', 'Korea (Republic of)', 118);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kwt', 'coverage', 'Kuwait', 119);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kgz', 'coverage', 'Kyrgyzstan', 120);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lao', 'coverage', 'Lao People''s Democratic Republic', 121);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lva', 'coverage', 'Latvia', 122);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lbn', 'coverage', 'Lebanon', 123);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lso', 'coverage', 'Lesotho', 124);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lbr', 'coverage', 'Liberia', 125);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lby', 'coverage', 'Libya', 126);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lie', 'coverage', 'Liechtenstein', 127);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ltu', 'coverage', 'Lithuania', 128);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lux', 'coverage', 'Luxembourg', 129);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mac', 'coverage', 'Macao', 130);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mkd', 'coverage', 'Macedonia (the former Yugoslav Republic of)', 131);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mdg', 'coverage', 'Madagascar', 132);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mwi', 'coverage', 'Malawi', 133);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mys', 'coverage', 'Malaysia', 134);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mdv', 'coverage', 'Maldives', 135);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mli', 'coverage', 'Mali', 136);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mlt', 'coverage', 'Malta', 137);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mhl', 'coverage', 'Marshall Islands', 138);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mtq', 'coverage', 'Martinique', 139);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mrt', 'coverage', 'Mauritania', 140);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mus', 'coverage', 'Mauritius', 141);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-myt', 'coverage', 'Mayotte', 142);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mex', 'coverage', 'Mexico', 143);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-fsm', 'coverage', 'Micronesia (Federated States of)', 144);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mda', 'coverage', 'Moldova (Republic of)', 145);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mco', 'coverage', 'Monaco', 146);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mng', 'coverage', 'Mongolia', 147);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mne', 'coverage', 'Montenegro', 148);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-msr', 'coverage', 'Montserrat', 149);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mar', 'coverage', 'Morocco', 150);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-moz', 'coverage', 'Mozambique', 151);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mmr', 'coverage', 'Myanmar', 152);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nam', 'coverage', 'Namibia', 153);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nru', 'coverage', 'Nauru', 154);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-npl', 'coverage', 'Nepal', 155);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nld', 'coverage', 'Netherlands', 156);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ncl', 'coverage', 'New Caledonia', 157);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nzl', 'coverage', 'New Zealand', 158);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nic', 'coverage', 'Nicaragua', 159);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ner', 'coverage', 'Niger', 160);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nga', 'coverage', 'Nigeria', 161);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-niu', 'coverage', 'Niue', 162);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nfk', 'coverage', 'Norfolk Island', 163);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-mnp', 'coverage', 'Northern Mariana Islands', 164);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-nor', 'coverage', 'Norway', 165);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-omn', 'coverage', 'Oman', 166);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pak', 'coverage', 'Pakistan', 167);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-plw', 'coverage', 'Palau', 168);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pse', 'coverage', 'Palestine, State of', 169);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pan', 'coverage', 'Panama', 170);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-png', 'coverage', 'Papua New Guinea', 171);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pry', 'coverage', 'Paraguay', 172);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-per', 'coverage', 'Peru', 173);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-phl', 'coverage', 'Philippines', 174);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pcn', 'coverage', 'Pitcairn', 175);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pol', 'coverage', 'Poland', 176);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-prt', 'coverage', 'Portugal', 177);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-pri', 'coverage', 'Puerto Rico', 178);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-qat', 'coverage', 'Qatar', 179);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-reu', 'coverage', 'Réunion', 180);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-rou', 'coverage', 'Romania', 181);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-rus', 'coverage', 'Russian Federation', 182);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-rwa', 'coverage', 'Rwanda', 183);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-blm', 'coverage', 'Saint Barthélemy', 184);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-shn', 'coverage', 'Saint Helena, Ascension and Tristan da Cunha', 185);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-kna', 'coverage', 'Saint Kitts and Nevis', 186);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lca', 'coverage', 'Saint Lucia', 187);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-maf', 'coverage', 'Saint Martin (French part)', 188);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-spm', 'coverage', 'Saint Pierre and Miquelon', 189);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vct', 'coverage', 'Saint Vincent and the Grenadines', 190);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-wsm', 'coverage', 'Samoa', 191);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-smr', 'coverage', 'San Marino', 192);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-stp', 'coverage', 'Sao Tome and Principe', 193);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sau', 'coverage', 'Saudi Arabia', 194);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sen', 'coverage', 'Senegal', 195);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-srb', 'coverage', 'Serbia', 196);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-syc', 'coverage', 'Seychelles', 197);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sle', 'coverage', 'Sierra Leone', 198);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sgp', 'coverage', 'Singapore', 199);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sxm', 'coverage', 'Sint Maarten (Dutch part)', 200);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-svk', 'coverage', 'Slovakia', 201);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-svn', 'coverage', 'Slovenia', 202);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-slb', 'coverage', 'Solomon Islands', 203);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-som', 'coverage', 'Somalia', 204);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-zaf', 'coverage', 'South Africa', 205);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sgs', 'coverage', 'South Georgia and the South Sandwich Islands', 206);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ssd', 'coverage', 'South Sudan', 207);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-esp', 'coverage', 'Spain', 208);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-lka', 'coverage', 'Sri Lanka', 209);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sdn', 'coverage', 'Sudan', 210);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sur', 'coverage', 'Suriname', 211);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-sjm', 'coverage', 'Svalbard and Jan Mayen', 212);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-swz', 'coverage', 'Swaziland', 213);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-swe', 'coverage', 'Sweden', 214);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-che', 'coverage', 'Switzerland', 215);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-syr', 'coverage', 'Syrian Arab Republic', 216);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-twn', 'coverage', 'Taiwan, Province of China', 217);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tjk', 'coverage', 'Tajikistan', 218);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tza', 'coverage', 'Tanzania, United Republic of', 219);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tha', 'coverage', 'Thailand', 220);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tls', 'coverage', 'Timor-Leste', 221);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tgo', 'coverage', 'Togo', 222);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tkl', 'coverage', 'Tokelau', 223);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ton', 'coverage', 'Tonga', 224);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tto', 'coverage', 'Trinidad and Tobago', 225);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tun', 'coverage', 'Tunisia', 226);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tur', 'coverage', 'Turkey', 227);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tkm', 'coverage', 'Turkmenistan', 228);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tca', 'coverage', 'Turks and Caicos Islands', 229);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-tuv', 'coverage', 'Tuvalu', 230);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-uga', 'coverage', 'Uganda', 231);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ukr', 'coverage', 'Ukraine', 232);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-are', 'coverage', 'United Arab Emirates', 233);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-gbr', 'coverage', 'United Kingdom of Great Britain and Northern Ireland', 234);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-usa', 'coverage', 'United States of America', 235);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-umi', 'coverage', 'United States Minor Outlying Islands', 236);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ury', 'coverage', 'Uruguay', 237);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-uzb', 'coverage', 'Uzbekistan', 238);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vut', 'coverage', 'Vanuatu', 239);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-ven', 'coverage', 'Venezuela (Bolivarian Republic of)', 240);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vnm', 'coverage', 'Viet Nam', 241);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vgb', 'coverage', 'Virgin Islands (British)', 242);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-vir', 'coverage', 'Virgin Islands (U.S.)', 243);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-wlf', 'coverage', 'Wallis and Futuna', 244);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-esh', 'coverage', 'Western Sahara', 245);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-yem', 'coverage', 'Yemen', 246);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-zmb', 'coverage', 'Zambia', 247);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('coverage-zwe', 'coverage', 'Zimbabwe', 248);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-all-rights-reserved', 'rights', 'All Rights Reserved', 0);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-public-domain', 'rights', 'Public Domain', 1);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-ancsa', 'rights', 'Attribution-NonCommercial-ShareAlike License', 2);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-anc', 'rights', 'Attribution-NonCommercial License', 3);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-ancnd', 'rights', 'Attribution-NonCommercial-NoDerivs License', 4);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-a', 'rights', 'Attribution License', 5);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-asa', 'rights', 'Attribution-ShareAlike License', 6);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-cc-and', 'rights', 'Attribution-NoDerivs License', 7);
insert into T_VOCABULARY(VOC_ID_C, VOC_NAME_C, VOC_VALUE_C, VOC_ORDER_N) values('rights-no-copyright', 'rights', 'No known copyright restrictions', 8);
update T_CONFIG set CFG_VALUE_C = '6' where CFG_ID_C = 'DB_VERSION';

View File

@ -23,7 +23,7 @@ public class TestJpa extends BaseTransactionalTest {
user.setStorageCurrent(0l);
user.setStorageQuota(10l);
user.setPrivateKey("AwesomePrivateKey");
String id = userDao.create(user);
String id = userDao.create(user, "me");
TransactionUtil.commit();

View File

@ -28,7 +28,7 @@
<com.h2database.h2.version>1.4.191</com.h2database.h2.version>
<org.glassfish.jersey.version>2.22.1</org.glassfish.jersey.version>
<org.mindrot.jbcrypt>0.3m</org.mindrot.jbcrypt>
<org.apache.lucene.version>4.2.0</org.apache.lucene.version>
<org.apache.lucene.version>5.5.0</org.apache.lucene.version>
<org.imgscalr.imgscalr-lib.version>4.2</org.imgscalr.imgscalr-lib.version>
<org.apache.pdfbox.pdfbox.version>2.0.0-RC3</org.apache.pdfbox.pdfbox.version>
<org.bouncycastle.bcprov-jdk15on.version>1.54</org.bouncycastle.bcprov-jdk15on.version>
@ -341,7 +341,7 @@
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId>
<artifactId>lucene-backward-codecs</artifactId>
<version>${org.apache.lucene.version}</version>
</dependency>

View File

@ -143,6 +143,12 @@ public class ValidationUtil {
}
}
public static void validateRegex(String s, String name, String regex) throws ClientException {
if (!Pattern.compile(regex).matcher(s).matches()) {
throw new ClientException("ValidationError", MessageFormat.format("{0} must match {1}", name, regex));
}
}
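
The new helper throws a ClientException when the whole value does not match the given pattern. A hedged usage example; the parameter name and pattern are made up, and the value is assumed to have already passed a required-field check.

// Rejects anything that is not a 7-character hex color such as "#ff0000".
ValidationUtil.validateRegex(color, "color", "#[0-9a-f]{6}");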
/**
* Checks if the string is a number.
*

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=5
db.version=6

View File

@ -77,7 +77,7 @@ public class AclResource extends BaseResource {
// Avoid duplicates
if (!aclDao.checkPermission(acl.getSourceId(), acl.getPerm(), acl.getTargetId())) {
aclDao.create(acl);
aclDao.create(acl, principal.getId());
// Returns the ACL
JsonObjectBuilder response = Json.createObjectBuilder()
@ -126,7 +126,7 @@ public class AclResource extends BaseResource {
}
// Delete the ACL
aclDao.delete(sourceId, perm, targetId);
aclDao.delete(sourceId, perm, targetId, principal.getId());
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()

View File

@ -68,6 +68,7 @@ public class AuditLogResource extends BaseResource {
for (AuditLogDto auditLogDto : paginatedList.getResultList()) {
logs.add(Json.createObjectBuilder()
.add("id", auditLogDto.getId())
.add("username", auditLogDto.getUsername())
.add("target", auditLogDto.getEntityId())
.add("class", auditLogDto.getEntityClass())
.add("type", auditLogDto.getType().name())

View File

@ -61,7 +61,7 @@ public class CommentResource extends BaseResource {
comment.setContent(content);
comment.setUserId(principal.getId());
CommentDao commentDao = new CommentDao();
commentDao.create(comment);
commentDao.create(comment, principal.getId());
// Returns the comment
JsonObjectBuilder response = Json.createObjectBuilder()
@ -86,9 +86,6 @@ public class CommentResource extends BaseResource {
throw new ForbiddenClientException();
}
// Validate input data
ValidationUtil.validateRequired(id, "id");
// Get the comment
CommentDao commentDao = new CommentDao();
Comment comment = commentDao.getActiveById(id);
@ -106,7 +103,7 @@ public class CommentResource extends BaseResource {
}
// Delete the comment
commentDao.delete(id);
commentDao.delete(id, principal.getId());
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()

View File

@ -40,12 +40,14 @@ import com.google.common.io.ByteStreams;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.jpa.AclDao;
import com.sismics.docs.core.dao.jpa.ContributorDao;
import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.TagDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.jpa.dto.AclDto;
import com.sismics.docs.core.dao.jpa.dto.ContributorDto;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.dao.jpa.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
@ -128,7 +130,14 @@ public class DocumentResource extends BaseResource {
}
// Below is specific to GET /document/id
document.add("subject", JsonUtil.nullable(documentDto.getSubject()));
document.add("identifier", JsonUtil.nullable(documentDto.getIdentifier()));
document.add("publisher", JsonUtil.nullable(documentDto.getPublisher()));
document.add("format", JsonUtil.nullable(documentDto.getFormat()));
document.add("source", JsonUtil.nullable(documentDto.getSource()));
document.add("type", JsonUtil.nullable(documentDto.getType()));
document.add("coverage", JsonUtil.nullable(documentDto.getCoverage()));
document.add("rights", JsonUtil.nullable(documentDto.getRights()));
document.add("creator", documentDto.getCreator());
// Add ACL
@ -152,6 +161,17 @@ public class DocumentResource extends BaseResource {
document.add("acls", aclList)
.add("writable", writable);
// Add contributors
ContributorDao contributorDao = new ContributorDao();
List<ContributorDto> contributorDtoList = contributorDao.getByDocumentId(documentId);
JsonArrayBuilder contributorList = Json.createArrayBuilder();
for (ContributorDto contributorDto : contributorDtoList) {
contributorList.add(Json.createObjectBuilder()
.add("username", contributorDto.getUsername())
.add("email", contributorDto.getEmail()));
}
document.add("contributors", contributorList);
return Response.ok().entity(document.build()).build();
}
@ -288,6 +308,7 @@ public class DocumentResource extends BaseResource {
}
TagDao tagDao = new TagDao();
UserDao userDao = new UserDao();
DateTimeParser[] parsers = {
DateTimeFormat.forPattern("yyyy").getParser(),
DateTimeFormat.forPattern("yyyy-MM").getParser(),
@ -363,6 +384,16 @@ public class DocumentResource extends BaseResource {
if (Constants.SUPPORTED_LANGUAGES.contains(params[1])) {
documentCriteria.setLanguage(params[1]);
}
} else if (params[0].equals("by")) {
// New creator criteria
User user = userDao.getActiveByUsername(params[1]);
if (user == null) {
// This user doesn't exist, return nothing
documentCriteria.setCreatorId(UUID.randomUUID().toString());
} else {
// This user exists, search their documents
documentCriteria.setCreatorId(user.getId());
}
} else if (params[0].equals("full")) {
// New full content search criteria
fullQuery.add(params[1]);
@ -390,6 +421,14 @@ public class DocumentResource extends BaseResource {
public Response add(
@FormParam("title") String title,
@FormParam("description") String description,
@FormParam("subject") String subject,
@FormParam("identifier") String identifier,
@FormParam("publisher") String publisher,
@FormParam("format") String format,
@FormParam("source") String source,
@FormParam("type") String type,
@FormParam("coverage") String coverage,
@FormParam("rights") String rights,
@FormParam("tags") List<String> tagList,
@FormParam("language") String language,
@FormParam("create_date") String createDateStr) {
@ -401,6 +440,14 @@ public class DocumentResource extends BaseResource {
title = ValidationUtil.validateLength(title, "title", 1, 100, false);
language = ValidationUtil.validateLength(language, "language", 3, 3, false);
description = ValidationUtil.validateLength(description, "description", 0, 4000, true);
subject = ValidationUtil.validateLength(subject, "subject", 0, 500, true);
identifier = ValidationUtil.validateLength(identifier, "identifier", 0, 500, true);
publisher = ValidationUtil.validateLength(publisher, "publisher", 0, 500, true);
format = ValidationUtil.validateLength(format, "format", 0, 500, true);
source = ValidationUtil.validateLength(source, "source", 0, 500, true);
type = ValidationUtil.validateLength(type, "type", 0, 100, true);
coverage = ValidationUtil.validateLength(coverage, "coverage", 0, 100, true);
rights = ValidationUtil.validateLength(rights, "rights", 0, 100, true);
Date createDate = ValidationUtil.validateDate(createDateStr, "create_date", true);
if (!Constants.SUPPORTED_LANGUAGES.contains(language)) {
throw new ClientException("ValidationError", MessageFormat.format("{0} is not a supported language", language));
@ -412,13 +459,21 @@ public class DocumentResource extends BaseResource {
document.setUserId(principal.getId());
document.setTitle(title);
document.setDescription(description);
document.setSubject(subject);
document.setIdentifier(identifier);
document.setPublisher(publisher);
document.setFormat(format);
document.setSource(source);
document.setType(type);
document.setCoverage(coverage);
document.setRights(rights);
document.setLanguage(language);
if (createDate == null) {
document.setCreateDate(new Date());
} else {
document.setCreateDate(createDate);
}
String documentId = documentDao.create(document);
String documentId = documentDao.create(document, principal.getId());
// Create read ACL
AclDao aclDao = new AclDao();
@ -426,20 +481,21 @@ public class DocumentResource extends BaseResource {
acl.setPerm(PermType.READ);
acl.setSourceId(documentId);
acl.setTargetId(principal.getId());
aclDao.create(acl);
aclDao.create(acl, principal.getId());
// Create write ACL
acl = new Acl();
acl.setPerm(PermType.WRITE);
acl.setSourceId(documentId);
acl.setTargetId(principal.getId());
aclDao.create(acl);
aclDao.create(acl, principal.getId());
// Update tags
updateTagList(documentId, tagList);
// Raise a document created event
DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent();
documentCreatedAsyncEvent.setUserId(principal.getId());
documentCreatedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentCreatedAsyncEvent);
@ -461,6 +517,14 @@ public class DocumentResource extends BaseResource {
@PathParam("id") String id,
@FormParam("title") String title,
@FormParam("description") String description,
@FormParam("subject") String subject,
@FormParam("identifier") String identifier,
@FormParam("publisher") String publisher,
@FormParam("format") String format,
@FormParam("source") String source,
@FormParam("type") String type,
@FormParam("coverage") String coverage,
@FormParam("rights") String rights,
@FormParam("tags") List<String> tagList,
@FormParam("language") String language,
@FormParam("create_date") String createDateStr) {
@ -472,6 +536,14 @@ public class DocumentResource extends BaseResource {
title = ValidationUtil.validateLength(title, "title", 1, 100, true);
language = ValidationUtil.validateLength(language, "language", 3, 3, true);
description = ValidationUtil.validateLength(description, "description", 0, 4000, true);
subject = ValidationUtil.validateLength(subject, "subject", 0, 500, true);
identifier = ValidationUtil.validateLength(identifier, "identifier", 0, 500, true);
publisher = ValidationUtil.validateLength(publisher, "publisher", 0, 500, true);
format = ValidationUtil.validateLength(format, "format", 0, 500, true);
source = ValidationUtil.validateLength(source, "source", 0, 500, true);
type = ValidationUtil.validateLength(type, "type", 0, 100, true);
coverage = ValidationUtil.validateLength(coverage, "coverage", 0, 100, true);
rights = ValidationUtil.validateLength(rights, "rights", 0, 100, true);
Date createDate = ValidationUtil.validateDate(createDateStr, "create_date", true);
if (language != null && !Constants.SUPPORTED_LANGUAGES.contains(language)) {
throw new ClientException("ValidationError", MessageFormat.format("{0} is not a supported language", language));
@ -492,6 +564,30 @@ public class DocumentResource extends BaseResource {
if (!StringUtils.isEmpty(description)) {
document.setDescription(description);
}
if (!StringUtils.isEmpty(subject)) {
document.setSubject(subject);
}
if (!StringUtils.isEmpty(identifier)) {
document.setIdentifier(identifier);
}
if (!StringUtils.isEmpty(publisher)) {
document.setPublisher(publisher);
}
if (!StringUtils.isEmpty(format)) {
document.setFormat(format);
}
if (!StringUtils.isEmpty(source)) {
document.setSource(source);
}
if (!StringUtils.isEmpty(type)) {
document.setType(type);
}
if (!StringUtils.isEmpty(coverage)) {
document.setCoverage(coverage);
}
if (!StringUtils.isEmpty(rights)) {
document.setRights(rights);
}
if (createDate != null) {
document.setCreateDate(createDate);
}
@ -499,13 +595,14 @@ public class DocumentResource extends BaseResource {
document.setLanguage(language);
}
document = documentDao.update(document);
document = documentDao.update(document, principal.getId());
// Update tags
updateTagList(id, tagList);
// Raise a document updated event
DocumentUpdatedAsyncEvent documentUpdatedAsyncEvent = new DocumentUpdatedAsyncEvent();
documentUpdatedAsyncEvent.setUserId(principal.getId());
documentUpdatedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentUpdatedAsyncEvent);
@ -563,17 +660,19 @@ public class DocumentResource extends BaseResource {
}
// Delete the document
documentDao.delete(document.getId());
documentDao.delete(document.getId(), principal.getId());
// Raise file deleted events
// Raise file deleted events (don't bother sending a document updated event)
for (File file : fileList) {
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFile(file);
AppContext.getInstance().getAsyncEventBus().post(fileDeletedAsyncEvent);
}
// Raise a document deleted event
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentDeletedAsyncEvent);
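Note: below is a minimal, hypothetical client-side sketch of the extended add endpoint using the plain JAX-RS 2.0 client API. The PUT verb and the form parameter names come from DocumentResource above; the base URL and the authentication cookie name/value are placeholders, and the updated TestDocumentResource further down exercises the same parameters through the Jersey test client.
// Hypothetical sketch only; not part of this change.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Form;
public class DocumentAddSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        Form form = new Form()
                .param("title", "Annual report")      // required, 1-100 characters
                .param("language", "eng")             // must be a supported language
                // New optional Dublin Core metadata introduced by this change
                .param("subject", "Finance")
                .param("identifier", "REP-2016-01")
                .param("publisher", "Sismics")
                .param("format", "PDF")
                .param("source", "Accounting department")
                .param("type", "Text")
                .param("coverage", "France")
                .param("rights", "All Rights Reserved");
        String json = client
                .target("http://localhost:8080/docs-web/api")  // assumed base URL
                .path("/document").request()
                .cookie("auth_token", "<token>")               // assumed cookie name and value
                .put(Entity.form(form), String.class);
        System.out.println(json);                              // e.g. {"id":"..."}
        client.close();
    }
}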

View File

@ -44,6 +44,7 @@ import com.sismics.docs.core.dao.jpa.DocumentDao;
import com.sismics.docs.core.dao.jpa.FileDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.dao.jpa.dto.DocumentDto;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.event.FileCreatedAsyncEvent;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
@ -145,7 +146,7 @@ public class FileResource extends BaseResource {
file.setDocumentId(documentId);
file.setMimeType(mimeType);
file.setUserId(principal.getId());
String fileId = fileDao.create(file);
String fileId = fileDao.create(file, principal.getId());
// Guess the mime type a second time, for open document formats (first detected as a simple ZIP file)
file.setMimeType(MimeTypeUtil.guessOpenDocumentFormat(file, fileInputStream));
@ -160,14 +161,20 @@ public class FileResource extends BaseResource {
user.setStorageCurrent(user.getStorageCurrent() + fileData.length);
userDao.updateQuota(user);
// Raise a new file created event if we have a document
// Raise a new file created event and document updated event if we have a document
if (documentId != null) {
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(principal.getId());
fileCreatedAsyncEvent.setDocument(document);
fileCreatedAsyncEvent.setFile(file);
fileCreatedAsyncEvent.setInputStream(fileInputStream);
fileCreatedAsyncEvent.setPdfInputStream(pdfIntputStream);
AppContext.getInstance().getAsyncEventBus().post(fileCreatedAsyncEvent);
DocumentUpdatedAsyncEvent documentUpdatedAsyncEvent = new DocumentUpdatedAsyncEvent();
documentUpdatedAsyncEvent.setUserId(principal.getId());
documentUpdatedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentUpdatedAsyncEvent);
}
// Always return OK
@ -222,16 +229,22 @@ public class FileResource extends BaseResource {
file.setOrder(fileDao.getByDocumentId(principal.getId(), documentId).size());
fileDao.update(file);
// Raise a new file created event (it wasn't sent during file creation)
// Raise a new file created event and document updated event (it wasn't sent during file creation)
try {
java.nio.file.Path storedFile = DirectoryUtil.getStorageDirectory().resolve(id);
InputStream fileInputStream = Files.newInputStream(storedFile);
final InputStream responseInputStream = EncryptionUtil.decryptInputStream(fileInputStream, user.getPrivateKey());
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(principal.getId());
fileCreatedAsyncEvent.setDocument(document);
fileCreatedAsyncEvent.setFile(file);
fileCreatedAsyncEvent.setInputStream(responseInputStream);
AppContext.getInstance().getAsyncEventBus().post(fileCreatedAsyncEvent);
DocumentUpdatedAsyncEvent documentUpdatedAsyncEvent = new DocumentUpdatedAsyncEvent();
documentUpdatedAsyncEvent.setUserId(principal.getId());
documentUpdatedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentUpdatedAsyncEvent);
} catch (Exception e) {
throw new ClientException("AttachError", "Error attaching file to document", e);
}
@ -351,18 +364,19 @@ public class FileResource extends BaseResource {
return Response.status(Status.NOT_FOUND).build();
}
Document document = null;
if (file.getDocumentId() == null) {
// It's an orphan file
if (!file.getUserId().equals(principal.getId())) {
// But not ours
throw new ForbiddenClientException();
}
} else if (documentDao.getDocument(file.getDocumentId(), PermType.WRITE, principal.getId()) == null) {
} else if ((document = documentDao.getDocument(file.getDocumentId(), PermType.WRITE, principal.getId())) == null) {
return Response.status(Status.NOT_FOUND).build();
}
// Delete the file
fileDao.delete(file.getId());
fileDao.delete(file.getId(), principal.getId());
// Update the user quota
UserDao userDao = new UserDao();
@ -377,9 +391,18 @@ public class FileResource extends BaseResource {
// Raise a new file deleted event
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFile(file);
AppContext.getInstance().getAsyncEventBus().post(fileDeletedAsyncEvent);
if (document != null) {
// Raise a document updated event
DocumentUpdatedAsyncEvent documentUpdatedAsyncEvent = new DocumentUpdatedAsyncEvent();
documentUpdatedAsyncEvent.setUserId(principal.getId());
documentUpdatedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentUpdatedAsyncEvent);
}
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");

View File

@ -69,7 +69,7 @@ public class ShareResource extends BaseResource {
acl.setSourceId(documentId);
acl.setPerm(PermType.READ);
acl.setTargetId(share.getId());
aclDao.create(acl);
aclDao.create(acl, principal.getId());
// Returns the created ACL
JsonObjectBuilder response = Json.createObjectBuilder()

View File

@ -137,7 +137,7 @@ public class TagResource extends BaseResource {
tag.setColor(color);
tag.setUserId(principal.getId());
tag.setParentId(parentId);
String id = tagDao.create(tag);
String id = tagDao.create(tag, principal.getId());
JsonObjectBuilder response = Json.createObjectBuilder()
.add("id", id);
@ -203,7 +203,7 @@ public class TagResource extends BaseResource {
// The parent tag is always updated so that it can also be cleared
tag.setParentId(parentId);
tagDao.update(tag);
tagDao.update(tag, principal.getId());
JsonObjectBuilder response = Json.createObjectBuilder()
.add("id", id);
@ -232,7 +232,7 @@ public class TagResource extends BaseResource {
}
// Delete the tag
tagDao.delete(tagId);
tagDao.delete(tagId, principal.getId());
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()

View File

@ -74,7 +74,6 @@ public class UserResource extends BaseResource {
@FormParam("password") String password,
@FormParam("email") String email,
@FormParam("storage_quota") String storageQuotaStr) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
@ -106,7 +105,7 @@ public class UserResource extends BaseResource {
// Create the user
UserDao userDao = new UserDao();
try {
userDao.create(user);
userDao.create(user, principal.getId());
} catch (Exception e) {
if ("AlreadyExistingUsername".equals(e.getMessage())) {
throw new ServerException("AlreadyExistingUsername", "Login already used", e);
@ -132,7 +131,6 @@ public class UserResource extends BaseResource {
public Response update(
@FormParam("password") String password,
@FormParam("email") String email) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
@ -147,12 +145,12 @@ public class UserResource extends BaseResource {
if (email != null) {
user.setEmail(email);
}
user = userDao.update(user);
user = userDao.update(user, principal.getId());
// Change the password
if (StringUtils.isNotBlank(password)) {
user.setPassword(password);
userDao.updatePassword(user);
userDao.updatePassword(user, principal.getId());
}
// Always return OK
@ -176,7 +174,6 @@ public class UserResource extends BaseResource {
@FormParam("password") String password,
@FormParam("email") String email,
@FormParam("storage_quota") String storageQuotaStr) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
@ -201,12 +198,12 @@ public class UserResource extends BaseResource {
Long storageQuota = ValidationUtil.validateLong(storageQuotaStr, "storage_quota");
user.setStorageQuota(storageQuota);
}
user = userDao.update(user);
user = userDao.update(user, principal.getId());
// Change the password
if (StringUtils.isNotBlank(password)) {
user.setPassword(password);
userDao.updatePassword(user);
userDao.updatePassword(user, principal.getId());
}
// Always return OK
@ -225,7 +222,6 @@ public class UserResource extends BaseResource {
@Path("check_username")
public Response checkUsername(
@QueryParam("username") String username) {
UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
@ -255,7 +251,6 @@ public class UserResource extends BaseResource {
@FormParam("username") String username,
@FormParam("password") String password,
@FormParam("remember") boolean longLasted) {
// Validate the input data
username = StringUtils.strip(username);
password = StringUtils.strip(password);
@ -361,18 +356,20 @@ public class UserResource extends BaseResource {
// Delete the user
UserDao userDao = new UserDao();
userDao.delete(principal.getName());
userDao.delete(principal.getName(), principal.getId());
// Raise deleted events for documents
for (Document document : documentList) {
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentDeletedAsyncEvent);
}
// Raise deleted events for files
// Raise deleted events for files (don't bother sending a document updated event)
for (File file : fileList) {
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFile(file);
AppContext.getInstance().getAsyncEventBus().post(fileDeletedAsyncEvent);
}
@ -418,18 +415,20 @@ public class UserResource extends BaseResource {
List<File> fileList = fileDao.findByUserId(user.getId());
// Delete the user
userDao.delete(user.getUsername());
userDao.delete(user.getUsername(), principal.getId());
// Raise deleted events for documents
for (Document document : documentList) {
DocumentDeletedAsyncEvent documentDeletedAsyncEvent = new DocumentDeletedAsyncEvent();
documentDeletedAsyncEvent.setUserId(principal.getId());
documentDeletedAsyncEvent.setDocument(document);
AppContext.getInstance().getAsyncEventBus().post(documentDeletedAsyncEvent);
}
// Raise deleted events for files
// Raise deleted events for files (don't bother sending a document updated event)
for (File file : fileList) {
FileDeletedAsyncEvent fileDeletedAsyncEvent = new FileDeletedAsyncEvent();
fileDeletedAsyncEvent.setUserId(principal.getId());
fileDeletedAsyncEvent.setFile(file);
AppContext.getInstance().getAsyncEventBus().post(fileDeletedAsyncEvent);
}
@ -489,7 +488,6 @@ public class UserResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
@ -524,7 +522,6 @@ public class UserResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
JsonArrayBuilder users = Json.createArrayBuilder();
PaginatedList<UserDto> paginatedList = PaginatedLists.create(limit, offset);

View File

@ -0,0 +1,183 @@
package com.sismics.docs.rest.resource;
import java.util.List;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.sismics.docs.core.dao.jpa.VocabularyDao;
import com.sismics.docs.core.model.jpa.Vocabulary;
import com.sismics.docs.rest.constant.BaseFunction;
import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.util.ValidationUtil;
/**
* Vocabulary REST resources.
*
* @author bgamard
*/
@Path("/vocabulary")
public class VocabularyResource extends BaseResource {
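/**
* Get a vocabulary.
*
* @param name Name
* @return Response
*/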
@GET
@Path("{name: [a-z0-9\\-]+}")
public Response get(@PathParam("name") String name) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
// Assemble results
VocabularyDao vocabularyDao = new VocabularyDao();
List<Vocabulary> vocabularyList = vocabularyDao.getByName(name);
JsonArrayBuilder entries = Json.createArrayBuilder();
for (Vocabulary vocabulary : vocabularyList) {
entries.add(Json.createObjectBuilder()
.add("id", vocabulary.getId())
.add("name", vocabulary.getName())
.add("value", vocabulary.getValue())
.add("order", vocabulary.getOrder()));
}
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("entries", entries);
return Response.ok().entity(response.build()).build();
}
/**
* Add a vocabulary.
*
* @param name Name
* @param value Value
* @param orderStr Order
* @return Response
*/
@PUT
public Response add(@FormParam("name") String name,
@FormParam("value") String value,
@FormParam("order") String orderStr) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
// Validate input data
name = ValidationUtil.validateLength(name, "name", 1, 50, false);
ValidationUtil.validateRegex(name, "name", "[a-z0-9\\-]+");
value = ValidationUtil.validateLength(value, "value", 1, 500, false);
Integer order = ValidationUtil.validateInteger(orderStr, "order");
// Create the vocabulary
VocabularyDao vocabularyDao = new VocabularyDao();
Vocabulary vocabulary = new Vocabulary();
vocabulary.setName(name);
vocabulary.setValue(value);
vocabulary.setOrder(order);
vocabularyDao.create(vocabulary);
// Returns the vocabulary
JsonObjectBuilder response = Json.createObjectBuilder()
.add("id", vocabulary.getId())
.add("name", vocabulary.getName())
.add("value", vocabulary.getValue())
.add("order", vocabulary.getOrder());
return Response.ok().entity(response.build()).build();
}
/**
* Update a vocabulary.
*
* @param id ID
* @param name Name
* @param value Value
* @param orderStr Order
* @return Response
*/
@POST
@Path("{id: [a-z0-9\\-]+}")
public Response update(@PathParam("id") String id,
@FormParam("name") String name,
@FormParam("value") String value,
@FormParam("order") String orderStr) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
// Validate input data
name = ValidationUtil.validateLength(name, "name", 1, 50, true);
if (name != null) {
ValidationUtil.validateRegex(name, "name", "[a-z0-9\\-]+");
}
value = ValidationUtil.validateLength(value, "value", 1, 500, true);
Integer order = null;
if (orderStr != null) {
order = ValidationUtil.validateInteger(orderStr, "order");
}
// Get the vocabulary entry
VocabularyDao vocabularyDao = new VocabularyDao();
Vocabulary vocabulary = vocabularyDao.getById(id);
if (vocabulary == null) {
return Response.status(Status.NOT_FOUND).build();
}
// Update the vocabulary
if (name != null) {
vocabulary.setName(name);
}
if (value != null) {
vocabulary.setValue(value);
}
if (order != null) {
vocabulary.setOrder(order);
}
vocabularyDao.update(vocabulary);
// Returns the vocabulary
JsonObjectBuilder response = Json.createObjectBuilder()
.add("id", vocabulary.getId())
.add("name", vocabulary.getName())
.add("value", vocabulary.getValue())
.add("order", vocabulary.getOrder());
return Response.ok().entity(response.build()).build();
}
/**
* Delete a vocabulary.
*
* @param id ID
* @return Response
*/
@DELETE
@Path("{id: [a-z0-9\\-]+}")
public Response delete(@PathParam("id") String id) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
// Get the vocabulary
VocabularyDao vocabularyDao = new VocabularyDao();
Vocabulary vocabulary = vocabularyDao.getById(id);
if (vocabulary == null) {
return Response.status(Status.NOT_FOUND).build();
}
// Delete the vocabulary
vocabularyDao.delete(id);
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
return Response.ok().entity(response.build()).build();
}
}
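Note: a minimal, hypothetical sketch of consuming the GET endpoint above with the plain JAX-RS client, for instance to list the built-in coverage vocabulary. The base URL and cookie name/value are assumptions; TestVocabularyResource further down covers the same endpoints through the Jersey test client.
// Hypothetical sketch only; not part of this change.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
public class VocabularyListSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        String json = client
                .target("http://localhost:8080/docs-web/api")  // assumed base URL
                .path("/vocabulary/coverage").request()
                .cookie("auth_token", "<token>")               // assumed cookie name and value
                .get(String.class);
        // Response shape, as built above: {"entries":[{"id":...,"name":...,"value":...,"order":...}]}
        System.out.println(json);
        client.close();
    }
}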

View File

@ -106,6 +106,15 @@ angular.module('docs',
}
}
})
.state('settings.vocabulary', {
url: '/vocabulary',
views: {
'settings': {
templateUrl: 'partial/docs/settings.vocabulary.html',
controller: 'SettingsVocabulary'
}
}
})
.state('document', {
url: '/document',
abstract: true,
@ -207,7 +216,25 @@ angular.module('docs',
controller: 'Login'
}
}
});
})
.state('user', {
url: '/user',
views: {
'page': {
templateUrl: 'partial/docs/user.html',
controller: 'User'
}
}
})
.state('user.profile', {
url: '/:username',
views: {
'user': {
templateUrl: 'partial/docs/user.profile.html',
controller: 'UserProfile'
}
}
});
// Configuring Restangular
RestangularProvider.setBaseUrl('../api');

View File

@ -7,6 +7,9 @@ angular.module('docs').controller('DocumentEdit', function($rootScope, $scope, $
// Alerts
$scope.alerts = [];
// Vocabularies
$scope.vocabularies = [];
// Orphan files to add
$scope.orphanFiles = $stateParams.files ? $stateParams.files.split(',') : [];
@ -219,4 +222,14 @@ angular.module('docs').controller('DocumentEdit', function($rootScope, $scope, $
} else {
$scope.resetForm();
}
// Load vocabularies
$scope.loadVocabulary = function(name) {
Restangular.one('vocabulary', name).get().then(function(result) {
$scope.vocabularies[name] = result.entries;
});
};
$scope.loadVocabulary('type');
$scope.loadVocabulary('coverage');
$scope.loadVocabulary('rights');
});

View File

@ -0,0 +1,42 @@
'use strict';
/**
* Settings vocabulary page controller.
*/
angular.module('docs').controller('SettingsVocabulary', function($scope, Restangular) {
$scope.entries = [];
// Watch for vocabulary selection change
$scope.$watch('vocabulary', function(name) {
if (_.isUndefined(name) || name == '') {
$scope.entries = [];
return;
}
// Load entries
Restangular.one('vocabulary', name).get().then(function(result) {
$scope.entries = result.entries;
});
});
// Delete an entry
$scope.deleteEntry = function(entry) {
Restangular.one('vocabulary', entry.id).remove().then(function() {
$scope.entries.splice($scope.entries.indexOf(entry), 1);
});
};
// Update an entry
$scope.updateEntry = function(entry) {
Restangular.one('vocabulary', entry.id).post('', entry);
};
// Add an entry
$scope.addEntry = function(entry) {
entry.name = $scope.vocabulary;
Restangular.one('vocabulary').put(entry).then(function() {
$scope.entries.push(entry);
$scope.entry = {};
});
};
});

View File

@ -0,0 +1,16 @@
'use strict';
/**
* User controller.
*/
angular.module('docs').controller('User', function(Restangular, $scope, $state) {
// Load users
Restangular.one('user/list').get({ limit: 100 }).then(function(data) {
$scope.users = data.users;
});
// Open a user
$scope.openUser = function(user) {
$state.go('user.profile', { username: user.username });
};
});

View File

@ -0,0 +1,11 @@
'use strict';
/**
* User profile controller.
*/
angular.module('docs').controller('UserProfile', function($stateParams, Restangular, $scope) {
// Load user
Restangular.one('user', $stateParams.username).get().then(function(data) {
$scope.user = data;
});
});

View File

@ -60,6 +60,9 @@
<script src="app/docs/controller/SettingsLog.js" type="text/javascript"></script>
<script src="app/docs/controller/SettingsUser.js" type="text/javascript"></script>
<script src="app/docs/controller/SettingsUserEdit.js" type="text/javascript"></script>
<script src="app/docs/controller/SettingsVocabulary.js" type="text/javascript"></script>
<script src="app/docs/controller/User.js" type="text/javascript"></script>
<script src="app/docs/controller/UserProfile.js" type="text/javascript"></script>
<script src="app/docs/service/User.js" type="text/javascript"></script>
<script src="app/docs/service/Tag.js" type="text/javascript"></script>
<script src="app/docs/filter/Newline.js" type="text/javascript"></script>
@ -103,6 +106,9 @@
<li ng-class="{active: $uiRoute}" ui-route="/tag.*">
<a href="#/tag"><span class="glyphicon glyphicon-tags"></span> Tags</a>
</li>
<li ng-class="{active: $uiRoute}" ui-route="/user.*">
<a href="#/user"><span class="glyphicon glyphicon-user"></span> Users</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right" ng-show="!userInfo.anonymous">

View File

@ -1,6 +1,12 @@
<table class="table">
<tr ng-repeat="log in logs">
<td>{{ log.create_date | date: 'yyyy-MM-dd HH:mm' }}</td>
<td width="20%">{{ log.create_date | date: 'yyyy-MM-dd HH:mm' }}</td>
<td width="20%">
<a ng-href="#/user/{{ log.username }}">
<span class="glyphicon glyphicon-user"></span>
{{ log.username }}
</a>
</td>
<td>
{{ log.class }}
<span ng-switch="log.type">

View File

@ -2,56 +2,124 @@
<div ng-show="document || !isEdit()">
<form name="documentForm" class="form-horizontal">
<div class="form-group" ng-class="{ 'has-error': !documentForm.title.$valid }">
<label class="col-sm-2 control-label" for="inputTitle">Title</label>
<div class="col-sm-10">
<input required ng-maxlength="100" class="form-control" type="text" id="inputTitle"
placeholder="Title" name="title" ng-model="document.title" autocomplete="off"
typeahead="document for document in getTitleTypeahead($viewValue) | filter: $viewValue"
typeahead-wait-ms="200" ng-disabled="fileIsUploading" />
<fieldset>
<legend>Primary metadata</legend>
<div class="form-group" ng-class="{ 'has-error': !documentForm.title.$valid }">
<label class="col-sm-2 control-label" for="inputTitle">Title</label>
<div class="col-sm-10">
<input required ng-maxlength="100" class="form-control" type="text" id="inputTitle"
placeholder="The nature or genre of the resource" name="title" ng-model="document.title" autocomplete="off"
typeahead="document for document in getTitleTypeahead($viewValue) | filter: $viewValue"
typeahead-wait-ms="200" ng-disabled="fileIsUploading" />
</div>
</div>
</div>
<div class="form-group" ng-class="{ 'has-error': !documentForm.description.$valid }">
<label class="col-sm-2 control-label" for="inputDescription">Description</label>
<div class="col-sm-10">
<textarea ng-maxlength="4000" class="form-control" rows="5" id="inputDescription"
name="description" ng-model="document.description" ng-disabled="fileIsUploading"></textarea>
<div class="form-group" ng-class="{ 'has-error': !documentForm.description.$valid }">
<label class="col-sm-2 control-label" for="inputDescription">Description</label>
<div class="col-sm-10">
<textarea ng-maxlength="4000" class="form-control" rows="5" id="inputDescription" placeholder="An account of the resource"
name="description" ng-model="document.description" ng-disabled="fileIsUploading"></textarea>
</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputCreateDate">Creation date</label>
<div class="col-sm-10">
<input type="text" id="inputCreateDate" ng-readonly="true" datepicker-popup="yyyy-MM-dd" class="form-control"
ng-model="document.create_date" starting-day="1" show-weeks="false" ng-disabled="fileIsUploading" />
<div class="form-group">
<label class="col-sm-2 control-label" for="inputCreateDate">Creation date</label>
<div class="col-sm-10">
<input type="text" id="inputCreateDate" ng-readonly="true" datepicker-popup="yyyy-MM-dd" class="form-control"
ng-model="document.create_date" starting-day="1" show-weeks="false" ng-disabled="fileIsUploading" />
</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputLanguage">Language</label>
<div class="col-sm-10">
<select class="form-control" id="inputLanguage" ng-model="document.language" ng-disabled="fileIsUploading">
<option value="fra">French</option>
<option value="eng">English</option>
<option value="jpn">Japanese</option>
</select>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputLanguage">Language</label>
<div class="col-sm-10">
<select class="form-control" id="inputLanguage" ng-model="document.language" ng-disabled="fileIsUploading">
<option value="fra">French</option>
<option value="eng">English</option>
<option value="jpn">Japanese</option>
</select>
</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputFiles">New files</label>
<div class="col-sm-6">
<file class="form-control" id="inputFiles" multiple="multiple" ng-model="newFiles"
accept="image/png,image/jpg,image/jpeg,image/gif,application/pdf,application/vnd.oasis.opendocument.text,application/vnd.openxmlformats-officedocument.wordprocessingml.document"
ng-disabled="fileIsUploading"></file>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputFiles">New files</label>
<div class="col-sm-6">
<file class="form-control" id="inputFiles" multiple="multiple" ng-model="newFiles"
accept="image/png,image/jpg,image/jpeg,image/gif,application/pdf,application/vnd.oasis.opendocument.text,application/vnd.openxmlformats-officedocument.wordprocessingml.document"
ng-disabled="fileIsUploading"></file>
</div>
<div class="col-sm-4" ng-if="orphanFiles.length > 0">
+ {{ orphanFiles.length }} file{{ orphanFiles.length > 1 ? 's' : '' }}
</div>
</div>
<div class="col-sm-4" ng-if="orphanFiles.length > 0">
+ {{ orphanFiles.length }} file{{ orphanFiles.length > 1 ? 's' : '' }}
<div class="form-group">
<label class="col-sm-2 control-label" for="inputTags">Tags</label>
<div class="col-sm-10">
<select-tag tags="document.tags" ref="inputTags" ng-disabled="fileIsUploading"></select-tag>
</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputTags">Tags</label>
<div class="col-sm-10">
<select-tag tags="document.tags" ref="inputTags" ng-disabled="fileIsUploading"></select-tag>
</fieldset>
<fieldset>
<legend>Additional metadata</legend>
<div class="form-group" ng-class="{ 'has-error': !documentForm.subject.$valid }">
<label class="col-sm-2 control-label" for="inputSubject">Subject</label>
<div class="col-sm-10">
<input ng-maxlength="500" class="form-control" type="text" id="inputSubject"
placeholder="The topic of the resource" name="subject" ng-model="document.subject" ng-disabled="fileIsUploading" />
</div>
</div>
</div>
<div class="form-group" ng-class="{ 'has-error': !documentForm.identifier.$valid }">
<label class="col-sm-2 control-label" for="inputIdentifier">Identifier</label>
<div class="col-sm-10">
<input ng-maxlength="500" class="form-control" type="text" id="inputIdentifier"
placeholder="An unambiguous reference to the resource within a given context" name="identifier" ng-model="document.identifier" ng-disabled="fileIsUploading" />
</div>
</div>
<div class="form-group" ng-class="{ 'has-error': !documentForm.publisher.$valid }">
<label class="col-sm-2 control-label" for="inputPublisher">Publisher</label>
<div class="col-sm-10">
<input ng-maxlength="500" class="form-control" type="text" id="inputPublisher"
placeholder="An entity responsible for making the resource available" name="publisher" ng-model="document.publisher" ng-disabled="fileIsUploading" />
</div>
</div>
<div class="form-group" ng-class="{ 'has-error': !documentForm.format.$valid }">
<label class="col-sm-2 control-label" for="inputFormat">Format</label>
<div class="col-sm-10">
<input ng-maxlength="500" class="form-control" type="text" id="inputFormat"
placeholder="The file format, physical medium, or dimensions of the resource" name="format" ng-model="document.format" ng-disabled="fileIsUploading" />
</div>
</div>
<div class="form-group" ng-class="{ 'has-error': !documentForm.source.$valid }">
<label class="col-sm-2 control-label" for="inputSource">Source</label>
<div class="col-sm-10">
<input ng-maxlength="500" class="form-control" type="text" id="inputSource"
placeholder="A related resource from which the described resource is derived" name="source" ng-model="document.source" ng-disabled="fileIsUploading" />
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputType">Type</label>
<div class="col-sm-10">
<select class="form-control" id="inputType" name="type" ng-model="document.type" ng-disabled="fileIsUploading">
<option ng-repeat="vocabulary in vocabularies['type']">{{ vocabulary.value }}</option>
</select>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputCoverage">Coverage</label>
<div class="col-sm-10">
<select class="form-control" id="inputCoverage" name="coverage" ng-model="document.coverage" ng-disabled="fileIsUploading">
<option ng-repeat="vocabulary in vocabularies['coverage']">{{ vocabulary.value }}</option>
</select>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label" for="inputRights">Rights</label>
<div class="col-sm-10">
<select class="form-control" id="inputRights" name="rights" ng-model="document.rights" ng-disabled="fileIsUploading">
<option ng-repeat="vocabulary in vocabularies['rights']">{{ vocabulary.value }}</option>
</select>
</div>
</div>
</fieldset>
<div class="form-group">
<div class="col-sm-offset-2 col-sm-10">
<button type="submit" class="btn btn-primary" ng-disabled="!documentForm.$valid || fileIsUploading" ng-click="edit()">{{ isEdit() ? 'Edit' : 'Add' }}</button>

View File

@ -26,7 +26,8 @@
tag:car<br/>
full:led<br/>
shared:yes<br/>
lang:fra"></span>
lang:fra<br/>
by:user1"></span>
</span>
<input type="search" class="form-control" placeholder="Search" ng-model="search" />
<span class="input-group-addon">

View File

@ -1,4 +1,32 @@
<p class="well-sm" ng-bind-html="document.description | newline"></p>
<dl class="dl-horizontal">
<dt ng-if="document.subject">Subject</dt>
<dd ng-if="document.subject">{{ document.subject }}</dd>
<dt ng-if="document.identifier">Identifier</dt>
<dd ng-if="document.identifier">{{ document.identifier }}</dd>
<dt ng-if="document.publisher">Publisher</dt>
<dd ng-if="document.publisher">{{ document.publisher }}</dd>
<dt ng-if="document.format">Format</dt>
<dd ng-if="document.format">{{ document.format }}</dd>
<dt ng-if="document.source">Source</dt>
<dd ng-if="document.source">{{ document.source }}</dd>
<dt ng-if="document.type">Type</dt>
<dd ng-if="document.type">{{ document.type }}</dd>
<dt ng-if="document.coverage">Coverage</dt>
<dd ng-if="document.coverage">{{ document.coverage }}</dd>
<dt ng-if="document.rights">rights</dt>
<dd ng-if="document.rights">{{ document.rights }}</dd>
<dt>Contributors</dt>
<dd>
<span ng-repeat="contributor in document.contributors">
<span class="btn btn-default btn-xs">
<a href="#/user/{{ contributor.username }}">
{{ contributor.username }}
</a>
</span>
</span>
</dd>
</dl>
<div ng-file-drop drag-over-class="bg-success" ng-multiple="true" allow-dir="false" ng-model="dropFiles"
accept="image/*,application/pdf,application/zip" ng-file-change="fileDropped($files, $event, $rejectedFiles)">

View File

@ -36,7 +36,8 @@
<div class="page-header">
<h1>
{{ document.title }} <small>{{ document.create_date | date: 'yyyy-MM-dd' }} by {{ document.creator }}</small>
{{ document.title }} <small>{{ document.create_date | date: 'yyyy-MM-dd' }}
by <a href="#/user/{{ document.creator }}">{{ document.creator }}</a></small>
<img ng-if="document" ng-src="img/flag/{{ document.language }}.png" title="{{ document.language }}" />
</h1>

View File

@ -12,6 +12,7 @@
<div class="panel-heading" ng-show="isAdmin"><strong>General settings</strong></div>
<ul class="list-group">
<a class="list-group-item" ng-show="isAdmin" ng-class="{active: $uiRoute}" ui-route="/settings/user.*" href="#/settings/user">Users</a>
<a class="list-group-item" ng-show="isAdmin" ng-class="{active: $uiRoute}" ui-route="/settings/vocabulary.*" href="#/settings/vocabulary">Vocabularies</a>
<a class="list-group-item" ng-show="isAdmin" ng-class="{active: $uiRoute}" ui-route="/settings/log" href="#/settings/log">Server logs</a>
</ul>
</div>

View File

@ -0,0 +1,51 @@
<h1>Vocabulary <small>entries</small></h1>
<div class="row">
<form class="form-inline">
<div class="form-group">
<label for="inputVocabulary">Choose a vocabulary to edit</label>
<select class="form-control" id="inputVocabulary" ng-model="vocabulary" ng-init="vocabulary = 'type'">
<option value="type">Type</option>
<option value="coverage">Coverage</option>
<option value="rights">Rights</option>
</select>
</div>
</form>
<table class="table table-striped" ng-show="entries">
<thead>
<tr>
<th width="70%">Value</th>
<th width="20%">Order</th>
<th width="10%"></th>
</tr>
</thead>
<tbody>
<tr class="info">
<td>
<input type="text" placeholder="New entry" class="form-control" ng-model="entry.value" maxlength="500" />
</td>
<td>
<input type="number" class="form-control" ng-model="entry.order" />
</td>
<td>
<span ng-click="addEntry(entry)" class="glyphicon glyphicon-plus pointer"></span>
</td>
</tr>
<tr>
<td colspan="3">&nbsp;</td>
</tr>
<tr ng-repeat="entry in entries | orderBy: 'order'">
<td>
<input type="text" class="form-control" ng-model="entry.value" maxlength="500" ng-blur="updateEntry(entry)" />
</td>
<td>
<input type="number" class="form-control" ng-model="entry.order" ng-blur="updateEntry(entry)" />
</td>
<td>
<span ng-click="deleteEntry(entry)" class="glyphicon glyphicon-trash pointer"></span>
</td>
</tr>
</tbody>
</table>
</div>

View File

@ -17,10 +17,10 @@
<input type="search" class="form-control" placeholder="Search" ng-model="search.name">
</p>
<table class="row table table-striped table-hover table-tags">
<table class="row table table-striped table-hover">
<tbody>
<tr ng-repeat="tag in tags | filter:search">
<td><inline-edit value="tag.name" on-edit="updateTag(tag)" /></td>
<td><inline-edit value="tag.name" on-edit="updateTag(tag)" ></inline-edit></td>
<td class="col-xs-4">
<select class="form-control" ng-model="tag.parent" ng-change="updateTag(tag)">
<option value="" ng-selected="!tag.parent"></option>

View File

@ -0,0 +1,27 @@
<div class="row">
<div class="col-md-4">
<div class="well">
<p class="input-group">
<span class="input-group-addon"><span class="glyphicon glyphicon-search"></span></span>
<input type="search" class="form-control" placeholder="Search" ng-model="search">
</p>
<table class="row table table-striped table-hover">
<tbody>
<tr class="pointer" ng-repeat="user in users | filter: search"
ng-click="openUser(user)" ng-class="{ active: $stateParams.username == user.username }">
<td class="col-xs-4">
<span class="glyphicon glyphicon-user"></span>
{{ user.username }}
<span class="text-muted" ng-if="userInfo.username == user.username">It's you!</span>
</td>
</tr>
</tbody>
</table>
</div>
</div>
<div class="col-md-8">
<div ui-view="user"></div>
</div>
</div>

View File

@ -0,0 +1,28 @@
<div class="page-header">
<h1>{{ user.username }} <small>{{ user.email }}</small></h1>
</div>
<h4>Quota used</h4>
<div class="row">
<div class="col-md-6">
<div class="progress" title="{{(user.storage_current / user.storage_quota * 100) | number: 0}}% Used">
<div class="progress-bar" ng-style="{ 'width': (user.storage_current / user.storage_quota * 100) + '%' }">
<span class="sr-only">{{ (user.storage_current / user.storage_quota * 100) }}% Used</span>
</div>
</div>
</div>
</div>
<h4>Related links</h4>
<ul>
<li>
<a ng-href="#/document/search/by:{{ user.username }}">
Documents created by {{ user.username }}
</a>
</li>
<li>
<a ng-href="#/settings/user/edit/{{ user.username }}" ng-if="userInfo.base_functions.indexOf('ADMIN') != -1">
Edit {{ user.username }} user
</a>
</li>
</ul>

View File

@ -34,6 +34,24 @@
</div>
<p ng-bind-html="document.description | newline"></p>
<dl class="dl-horizontal">
<dt ng-if="document.subject">Subject</dt>
<dd ng-if="document.subject">{{ document.subject }}</dd>
<dt ng-if="document.identifier">Identifier</dt>
<dd ng-if="document.identifier">{{ document.identifier }}</dd>
<dt ng-if="document.publisher">Publisher</dt>
<dd ng-if="document.publisher">{{ document.publisher }}</dd>
<dt ng-if="document.format">Format</dt>
<dd ng-if="document.format">{{ document.format }}</dd>
<dt ng-if="document.source">Source</dt>
<dd ng-if="document.source">{{ document.source }}</dd>
<dt ng-if="document.type">Type</dt>
<dd ng-if="document.type">{{ document.type }}</dd>
<dt ng-if="document.coverage">Coverage</dt>
<dd ng-if="document.coverage">{{ document.coverage }}</dd>
<dt ng-if="document.rights">rights</dt>
<dd ng-if="document.rights">{{ document.rights }}</dd>
</dl>
<div class="row" ui-sortable="fileSortableOptions" ng-model="files" ng-show="files.length > 0">
<div class="col-xs-6 col-sm-4 col-md-3 col-lg-2 text-center" ng-repeat="file in files">

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=5
db.version=6

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=5
db.version=6

View File

@ -100,20 +100,36 @@ public class TestAclResource extends BaseJerseyTest {
acls = json.getJsonArray("acls");
Assert.assertEquals(4, acls.size());
// Update the document as acl2
json = target().path("/document/" + document1Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, acl2Token)
.post(Entity.form(new Form()
.param("title", "My new super document 1")), JsonObject.class);
Assert.assertEquals(document1Id, json.getString("id"));
// Get the document as acl2
json = target().path("/document/" + document1Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, acl2Token)
.get(JsonObject.class);
Assert.assertEquals(document1Id, json.getString("id"));
JsonArray contributors = json.getJsonArray("contributors");
Assert.assertEquals(2, contributors.size());
// Delete the ACL WRITE for acl2 with acl2
target().path("/acl/" + document1Id + "/WRITE/" + acl2Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, acl2Token)
.delete();
.delete(JsonObject.class);
// Delete the ACL READ for acl2 with acl2
target().path("/acl/" + document1Id + "/READ/" + acl2Id).request()
// Delete the ACL READ for acl2 with acl2 (not authorized)
response = target().path("/acl/" + document1Id + "/READ/" + acl2Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, acl2Token)
.delete();
Assert.assertEquals(Status.FORBIDDEN, Status.fromStatusCode(response.getStatus()));
// Delete the ACL READ for acl2 with acl1
target().path("/acl/" + document1Id + "/READ/" + acl2Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, acl1Token)
.delete();
.delete(JsonObject.class);
// Get the document as acl1
json = target().path("/document/" + document1Id).request()

View File

@ -59,13 +59,25 @@ public class TestAuditLogResource extends BaseJerseyTest {
.get(JsonObject.class);
JsonArray logs = json.getJsonArray("logs");
Assert.assertTrue(logs.size() == 3);
Assert.assertEquals(countByClass(logs, "Document"), 1);
Assert.assertEquals(countByClass(logs, "Acl"), 2);
Assert.assertEquals("auditlog1", logs.getJsonObject(0).getString("username"));
Assert.assertNotNull(logs.getJsonObject(0).getString("id"));
Assert.assertNotNull(logs.getJsonObject(0).getString("target"));
Assert.assertNotNull(logs.getJsonObject(0).getString("type"));
Assert.assertNotNull(logs.getJsonObject(0).getString("message"));
Assert.assertNotNull(logs.getJsonObject(0).getJsonNumber("create_date"));
Assert.assertEquals("auditlog1", logs.getJsonObject(1).getString("username"));
Assert.assertEquals("auditlog1", logs.getJsonObject(2).getString("username"));
// Get all logs for the current user
json = target().path("/auditlog").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, auditlog1Token)
.get(JsonObject.class);
logs = json.getJsonArray("logs");
Assert.assertTrue(logs.size() == 3);
Assert.assertTrue(logs.size() == 2);
Assert.assertEquals(countByClass(logs, "Document"), 1);
Assert.assertEquals(countByClass(logs, "Tag"), 1);
// Deletes a tag
json = target().path("/tag/" + tag1Id).request()
@ -78,6 +90,25 @@ public class TestAuditLogResource extends BaseJerseyTest {
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, auditlog1Token)
.get(JsonObject.class);
logs = json.getJsonArray("logs");
Assert.assertTrue(logs.size() == 4);
Assert.assertTrue(logs.size() == 3);
Assert.assertEquals(countByClass(logs, "Document"), 1);
Assert.assertEquals(countByClass(logs, "Tag"), 2);
}
/**
* Count logs by class.
*
* @param logs Logs
* @param clazz Class
* @return Count by class
*/
private int countByClass(JsonArray logs, String clazz) {
int count = 0;
for (int i = 0; i < logs.size(); i++) {
if (logs.getJsonObject(i).getString("class").equals(clazz)) {
count++;
}
}
return count;
}
}

View File

@ -55,13 +55,21 @@ public class TestDocumentResource extends BaseJerseyTest {
String tag1Id = json.getString("id");
Assert.assertNotNull(tag1Id);
// Create a document
// Create a document with document1
long create1Date = new Date().getTime();
json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("subject", "Subject document 1")
.param("identifier", "Identifier document 1")
.param("publisher", "Publisher document 1")
.param("format", "Format document 1")
.param("source", "Source document 1")
.param("type", "Software")
.param("coverage", "Greenland")
.param("rights", "Public Domain")
.param("tags", tag1Id)
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
@ -160,6 +168,17 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(1, searchDocuments("full:title", document1Token));
Assert.assertEquals(1, searchDocuments("title", document1Token));
Assert.assertEquals(1, searchDocuments("super description", document1Token));
Assert.assertEquals(1, searchDocuments("subject", document1Token));
Assert.assertEquals(1, searchDocuments("identifier", document1Token));
Assert.assertEquals(1, searchDocuments("publisher", document1Token));
Assert.assertEquals(1, searchDocuments("format", document1Token));
Assert.assertEquals(1, searchDocuments("source", document1Token));
Assert.assertEquals(1, searchDocuments("software", document1Token));
Assert.assertEquals(1, searchDocuments("greenland", document1Token));
Assert.assertEquals(1, searchDocuments("public domain", document1Token));
Assert.assertEquals(0, searchDocuments("by:document3", document1Token));
Assert.assertEquals(1, searchDocuments("by:document1", document1Token));
Assert.assertEquals(0, searchDocuments("by:nobody", document1Token));
Assert.assertEquals(1, searchDocuments("at:" + DateTimeFormat.forPattern("yyyy").print(new Date().getTime()), document1Token));
Assert.assertEquals(1, searchDocuments("at:" + DateTimeFormat.forPattern("yyyy-MM").print(new Date().getTime()), document1Token));
Assert.assertEquals(1, searchDocuments("at:" + DateTimeFormat.forPattern("yyyy-MM-dd").print(new Date().getTime()), document1Token));
@ -190,11 +209,22 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(true, json.getBoolean("shared"));
Assert.assertEquals("My super title document 1", json.getString("title"));
Assert.assertEquals("My super description for document 1", json.getString("description"));
Assert.assertEquals("Subject document 1", json.getString("subject"));
Assert.assertEquals("Identifier document 1", json.getString("identifier"));
Assert.assertEquals("Publisher document 1", json.getString("publisher"));
Assert.assertEquals("Format document 1", json.getString("format"));
Assert.assertEquals("Source document 1", json.getString("source"));
Assert.assertEquals("Software", json.getString("type"));
Assert.assertEquals("Greenland", json.getString("coverage"));
Assert.assertEquals("Public Domain", json.getString("rights"));
Assert.assertEquals("eng", json.getString("language"));
Assert.assertEquals(create1Date, json.getJsonNumber("create_date").longValue());
tags = json.getJsonArray("tags");
Assert.assertEquals(1, tags.size());
Assert.assertEquals(tag1Id, tags.getJsonObject(0).getString("id"));
JsonArray contributors = json.getJsonArray("contributors");
Assert.assertEquals(1, contributors.size());
Assert.assertEquals("document1", contributors.getJsonObject(0).getString("username"));
// Export a document in PDF format
Response response = target().path("/document/" + document1Id).request()
@ -217,6 +247,14 @@ public class TestDocumentResource extends BaseJerseyTest {
.post(Entity.form(new Form()
.param("title", "My new super document 1")
.param("description", "My new super description for document 1")
.param("subject", "My new subject for document 1")
.param("identifier", "My new identifier for document 1")
.param("publisher", "My new publisher for document 1")
.param("format", "My new format for document 1")
.param("source", "My new source for document 1")
.param("type", "Image")
.param("coverage", "France")
.param("rights", "All Rights Reserved")
.param("tags", tag2Id)), JsonObject.class);
Assert.assertEquals(document1Id, json.getString("id"));
@ -233,9 +271,20 @@ public class TestDocumentResource extends BaseJerseyTest {
.get(JsonObject.class);
Assert.assertTrue(json.getString("title").contains("new"));
Assert.assertTrue(json.getString("description").contains("new"));
Assert.assertTrue(json.getString("subject").contains("new"));
Assert.assertTrue(json.getString("identifier").contains("new"));
Assert.assertTrue(json.getString("publisher").contains("new"));
Assert.assertTrue(json.getString("format").contains("new"));
Assert.assertTrue(json.getString("source").contains("new"));
Assert.assertEquals("Image", json.getString("type"));
Assert.assertEquals("France", json.getString("coverage"));
Assert.assertEquals("All Rights Reserved", json.getString("rights"));
tags = json.getJsonArray("tags");
Assert.assertEquals(1, tags.size());
Assert.assertEquals(tag2Id, tags.getJsonObject(0).getString("id"));
contributors = json.getJsonArray("contributors");
Assert.assertEquals(1, contributors.size());
Assert.assertEquals("document1", contributors.getJsonObject(0).getString("username"));
// Deletes a document
json = target().path("/document/" + document1Id).request()

View File

@ -0,0 +1,115 @@
package com.sismics.docs.rest;
import javax.json.JsonObject;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.junit.Assert;
import org.junit.Test;
import com.sismics.util.filter.TokenBasedSecurityFilter;
/**
* Exhaustive test of the vocabulary resource.
*
* @author bgamard
*/
public class TestVocabularyResource extends BaseJerseyTest {
/**
* Test the vocabulary resource.
*
* @throws Exception
*/
@Test
public void testVocabularyResource() throws Exception {
// Login vocabulary1
clientUtil.createUser("vocabulary1");
String vocabulary1Token = clientUtil.login("vocabulary1");
// Login admin
String adminAuthenticationToken = clientUtil.login("admin", "admin", false);
// Get the coverage vocabulary entries
JsonObject json = target().path("/vocabulary/coverage").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, vocabulary1Token)
.get(JsonObject.class);
Assert.assertEquals(249, json.getJsonArray("entries").size());
JsonObject entry = json.getJsonArray("entries").getJsonObject(0);
Assert.assertEquals("coverage-afg", entry.getString("id"));
Assert.assertEquals("coverage", entry.getString("name"));
Assert.assertEquals("Afghanistan", entry.getString("value"));
Assert.assertEquals(0, entry.getJsonNumber("order").intValue());
entry = json.getJsonArray("entries").getJsonObject(248);
Assert.assertEquals("coverage-zwe", entry.getString("id"));
Assert.assertEquals("coverage", entry.getString("name"));
Assert.assertEquals("Zimbabwe", entry.getString("value"));
Assert.assertEquals(248, entry.getJsonNumber("order").intValue());
// Create a vocabulary entry with admin
json = target().path("/vocabulary").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminAuthenticationToken)
.put(Entity.form(new Form()
.param("name", "test-voc-1")
.param("value", "First value")
.param("order", "0")), JsonObject.class);
String vocabulary1Id = json.getString("id");
Assert.assertNotNull(vocabulary1Id);
Assert.assertEquals("test-voc-1", json.getString("name"));
Assert.assertEquals("First value", json.getString("value"));
Assert.assertEquals(0, json.getJsonNumber("order").intValue());
// Try to create a vocabulary entry with an invalid name
Response response = target().path("/vocabulary").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminAuthenticationToken)
.put(Entity.form(new Form()
.param("name", "NOT_VALID")
.param("value", "First value")
.param("order", "0")));
Assert.assertEquals(Status.BAD_REQUEST.getStatusCode(), response.getStatus());
// Get the test-voc-1 vocabulary entries
json = target().path("/vocabulary/test-voc-1").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, vocabulary1Token)
.get(JsonObject.class);
Assert.assertEquals(1, json.getJsonArray("entries").size());
entry = json.getJsonArray("entries").getJsonObject(0);
Assert.assertEquals(vocabulary1Id, entry.getString("id"));
Assert.assertEquals("First value", entry.getString("value"));
Assert.assertEquals(0, entry.getJsonNumber("order").intValue());
// Update a vocabulary entry with admin
json = target().path("/vocabulary/" + vocabulary1Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminAuthenticationToken)
.post(Entity.form(new Form()
.param("name", "test-voc-1-updated")
.param("value", "First value updated")
.param("order", "1")), JsonObject.class);
Assert.assertEquals(vocabulary1Id, json.getString("id"));
Assert.assertEquals("test-voc-1-updated", json.getString("name"));
Assert.assertEquals("First value updated", json.getString("value"));
Assert.assertEquals(1, json.getJsonNumber("order").intValue());
// Get the test-voc-1-updated vocabulary entries
json = target().path("/vocabulary/test-voc-1-updated").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, vocabulary1Token)
.get(JsonObject.class);
Assert.assertEquals(1, json.getJsonArray("entries").size());
entry = json.getJsonArray("entries").getJsonObject(0);
Assert.assertEquals(vocabulary1Id, entry.getString("id"));
Assert.assertEquals("First value updated", entry.getString("value"));
Assert.assertEquals(1, entry.getJsonNumber("order").intValue());
// Delete a vocabulary entry with admin
json = target().path("/vocabulary/" + vocabulary1Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, adminAuthenticationToken)
.delete(JsonObject.class);
// Get the test-voc-1-updated vocabulary entries
json = target().path("/vocabulary/test-voc-1-updated").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, vocabulary1Token)
.get(JsonObject.class);
Assert.assertEquals(0, json.getJsonArray("entries").size());
}
}

View File

@ -2,6 +2,6 @@
docker rm -f sismics_docs
docker run \
-d --name=sismics_docs --restart=always \
--volumes-from=sismics_docs_data \
-v sismics_docs_data:/data \
-e 'VIRTUAL_HOST_SECURE=docs.sismics.com' -e 'VIRTUAL_PORT=80' \
sismics/docs:latest