Merge pull request #27 from sismics/master

Push to production
Benjamin Gamard 2015-09-05 21:41:43 +02:00
commit df1eaf54c8
37 changed files with 382 additions and 138 deletions

View File

@ -27,9 +27,10 @@ Features
- SHA-256 encryption
- Tag system
- Multi-users ACL system
- Audit log
- Document sharing by URL
- RESTful Web API
- Modern Android client
- Fully featured Android client
Download
--------
@ -71,7 +72,7 @@ From the `docs-web` directory:
mvn -Pprod -DskipTests clean install
You will get your deployable WAR in the `target` directory.
You will get your deployable WAR in the `docs-web/target` directory.
License
-------

View File

@ -146,8 +146,8 @@
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -11,6 +11,7 @@ import java.util.UUID;
import javax.persistence.EntityManager;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.dao.jpa.criteria.AuditLogCriteria;
import com.sismics.docs.core.dao.jpa.dto.AuditLogDto;
@ -57,37 +58,29 @@ public class AuditLogDao {
*/
public void findByCriteria(PaginatedList<AuditLogDto> paginatedList, AuditLogCriteria criteria, SortCriteria sortCriteria) throws Exception {
Map<String, Object> parameterMap = new HashMap<String, Object>();
List<String> criteriaList = new ArrayList<String>();
StringBuilder sb = new StringBuilder("select l.LOG_ID_C c0, l.LOG_CREATEDATE_D c1, l.LOG_IDENTITY_C c2, l.LOG_CLASSENTITY_C c3, l.LOG_TYPE_C c4, l.LOG_MESSAGE_C c5 ");
sb.append(" from T_AUDIT_LOG l ");
String baseQuery = "select l.LOG_ID_C c0, l.LOG_CREATEDATE_D c1, l.LOG_IDENTITY_C c2, l.LOG_CLASSENTITY_C c3, l.LOG_TYPE_C c4, l.LOG_MESSAGE_C c5 from T_AUDIT_LOG l ";
List<String> queries = Lists.newArrayList();
// Adds search criteria
if (criteria.getDocumentId() != null) {
// ACL on document is not checked here, it's assumed
StringBuilder sb0 = new StringBuilder(" (l.LOG_IDENTITY_C = :documentId and l.LOG_CLASSENTITY_C = 'Document' ");
sb0.append(" or l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) and l.LOG_CLASSENTITY_C = 'File' ");
sb0.append(" or l.LOG_IDENTITY_C in (select a.ACL_ID_C from T_ACL a where a.ACL_SOURCEID_C = :documentId) and l.LOG_CLASSENTITY_C = 'Acl') ");
criteriaList.add(sb0.toString());
queries.add(baseQuery + " where l.LOG_IDENTITY_C = :documentId ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select a.ACL_ID_C from T_ACL a where a.ACL_SOURCEID_C = :documentId) ");
parameterMap.put("documentId", criteria.getDocumentId());
}
if (criteria.getUserId() != null) {
StringBuilder sb0 = new StringBuilder(" (l.LOG_IDENTITY_C = :userId and l.LOG_CLASSENTITY_C = 'User' ");
sb0.append(" or l.LOG_IDENTITY_C in (select t.TAG_ID_C from T_TAG t where t.TAG_IDUSER_C = :userId) and l.LOG_CLASSENTITY_C = 'Tag' ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C = :userId ");
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select t.TAG_ID_C from T_TAG t where t.TAG_IDUSER_C = :userId) ");
// Show only logs from owned documents, ACL are lost on delete
sb0.append(" or l.LOG_IDENTITY_C in (select d.DOC_ID_C from T_DOCUMENT d where d.DOC_IDUSER_C = :userId) and l.LOG_CLASSENTITY_C = 'Document') ");
criteriaList.add(sb0.toString());
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select d.DOC_ID_C from T_DOCUMENT d where d.DOC_IDUSER_C = :userId) ");
parameterMap.put("userId", criteria.getUserId());
}
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = new QueryParam(sb.toString(), parameterMap);
QueryParam queryParam = new QueryParam(Joiner.on(" union ").join(queries), parameterMap);
List<Object[]> l = PaginatedLists.executePaginatedQuery(paginatedList, queryParam, sortCriteria);
// Assemble results
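
The rewrite above replaces one SELECT whose WHERE mixes OR-combined entity checks with a UNION of per-entity branches (document, file, ACL), so each branch can hit the new IDX_LOG_IDENTITY_C index on T_AUDIT_LOG directly. Below is a minimal sketch of the same pattern as a plain Hibernate/JPA native query; the entity-manager handle and the reduced column list are illustrative, and this is not the project's PaginatedLists plumbing.

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;

public class AuditLogUnionSketch {
    // Builds the three UNION branches for a single document id and runs them as one native query.
    // Named parameters in native SQL are a Hibernate extension (the project uses Hibernate).
    public static List<?> findByDocumentId(EntityManager em, String documentId) {
        String baseQuery = "select l.LOG_ID_C, l.LOG_CREATEDATE_D, l.LOG_IDENTITY_C, l.LOG_CLASSENTITY_C, l.LOG_TYPE_C, l.LOG_MESSAGE_C from T_AUDIT_LOG l ";
        List<String> queries = Lists.newArrayList();
        queries.add(baseQuery + " where l.LOG_IDENTITY_C = :documentId ");
        queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) ");
        queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select a.ACL_ID_C from T_ACL a where a.ACL_SOURCEID_C = :documentId) ");
        Query q = em.createNativeQuery(Joiner.on(" union ").join(queries));
        q.setParameter("documentId", documentId);
        return q.getResultList();
    }
}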

View File

@ -183,7 +183,7 @@ public class DocumentDao {
Map<String, Object> parameterMap = new HashMap<String, Object>();
List<String> criteriaList = new ArrayList<String>();
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, ");
StringBuilder sb = new StringBuilder("select d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, ");
sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) c5, ");
sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) c6 ");
sb.append(" from T_DOCUMENT d ");
@ -215,8 +215,7 @@ public class DocumentDao {
if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
int index = 0;
for (String tagId : criteria.getTagIdList()) {
sb.append(" left join T_DOCUMENT_TAG dt" + index + " on dt" + index + ".DOT_IDDOCUMENT_C = d.DOC_ID_C and dt" + index + ".DOT_IDTAG_C = :tagId" + index + " ");
criteriaList.add("dt" + index + ".DOT_ID_C is not null");
sb.append(" join T_DOCUMENT_TAG dt" + index + " on dt" + index + ".DOT_IDDOCUMENT_C = d.DOC_ID_C and dt" + index + ".DOT_IDTAG_C = :tagId" + index + " ");
parameterMap.put("tagId" + index, tagId);
index++;
}
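
The tag filter now uses one INNER JOIN per selected tag instead of a LEFT JOIN plus a DOT_ID_C is not null criterion: a document row only survives if it matches every requested tag, and the extra NULL check disappears. A small illustrative sketch of that join-building loop, stripped down to the SQL assembly; table and column names are the ones from the diff, the class and its plain parameter map are assumptions.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;

public class TagFilterSketch {
    // One inner join per tag: a document appears in the result only if it carries all of them.
    static String buildQuery(List<String> tagIdList, Map<String, Object> parameterMap) {
        StringBuilder sb = new StringBuilder("select d.DOC_ID_C from T_DOCUMENT d ");
        int index = 0;
        for (String tagId : tagIdList) {
            sb.append(" join T_DOCUMENT_TAG dt" + index + " on dt" + index + ".DOT_IDDOCUMENT_C = d.DOC_ID_C and dt" + index + ".DOT_IDTAG_C = :tagId" + index + " ");
            parameterMap.put("tagId" + index, tagId);
            index++;
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<String, Object>();
        // Hypothetical tag ids, just to show the generated SQL for an AND-of-two-tags search
        System.out.println(buildQuery(Lists.newArrayList("tag-invoices", "tag-2015"), params));
    }
}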

View File

@ -121,9 +121,6 @@ public class FileDao {
fileFromDb.setContent(file.getContent());
fileFromDb.setOrder(file.getOrder());
// Create audit log
AuditLogUtil.create(fileFromDb, AuditLogType.UPDATE);
return file;
}

View File

@ -104,10 +104,10 @@ public class DirectoryUtil {
*/
private static File getDataSubDirectory(String subdirectory) {
File baseDataDir = getBaseDataDirectory();
File faviconDirectory = new File(baseDataDir.getPath() + File.separator + subdirectory);
if (!faviconDirectory.isDirectory()) {
faviconDirectory.mkdirs();
File directory = new File(baseDataDir.getPath() + File.separator + subdirectory);
if (!directory.isDirectory()) {
directory.mkdirs();
}
return faviconDirectory;
return directory;
}
}
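
Besides fixing the copy-pasted faviconDirectory name, note that mkdirs() still ignores its return value. Purely as an alternative sketch (not the project's code), the same helper written with java.nio.file surfaces creation failures instead of silently returning a missing directory:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class DataDirectorySketch {
    // Equivalent of getDataSubDirectory: createDirectories is a no-op when the directory exists
    // and throws when it cannot be created, so failures are not lost.
    static File getDataSubDirectory(File baseDataDir, String subdirectory) {
        Path directory = baseDataDir.toPath().resolve(subdirectory);
        try {
            Files.createDirectories(directory);
        } catch (IOException e) {
            throw new RuntimeException("Cannot create data subdirectory " + directory, e);
        }
        return directory.toFile();
    }
}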

View File

@ -88,7 +88,7 @@ public class FileUtil {
log.info("Starting OCR with TESSDATA_PREFIX=" + System.getenv("TESSDATA_PREFIX") + ";LC_NUMERIC=" + System.getenv("LC_NUMERIC"));
instance.setLanguage(document.getLanguage());
content = instance.doOCR(image);
} catch (Exception e) {
} catch (Throwable e) {
log.error("Error while OCR-izing the image", e);
}
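
Widening the catch from Exception to Throwable here is presumably about the native Tesseract bindings: failures such as UnsatisfiedLinkError are Errors, not Exceptions, and would otherwise escape the OCR guard. A self-contained illustration (not project code) of the difference:

public class CatchWidthDemo {
    public static void main(String[] args) {
        try {
            // Simulated native-binding failure: an Error, not an Exception
            throw new UnsatisfiedLinkError("libtesseract not found");
        } catch (Exception e) {
            System.out.println("never reached: Errors are not Exceptions");
        } catch (Throwable t) {
            System.out.println("caught by the wider handler: " + t);
        }
    }
}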

View File

@ -71,17 +71,17 @@ public final class EMF {
properties.load(is);
return properties;
}
} catch (IOException e) {
} catch (IOException | IllegalArgumentException e) {
log.error("Error reading hibernate.properties", e);
}
// Use environment parameters
log.info("Configuring EntityManager from environment parameters");
Map<Object, Object> props = new HashMap<Object, Object>();
props.put("hibernate.connection.driver_class", "org.hsqldb.jdbcDriver");
props.put("hibernate.connection.driver_class", "org.h2.Driver");
File dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.getAbsoluteFile() + File.separator + "docs";
props.put("hibernate.connection.url", "jdbc:hsqldb:file:" + dbFile + ";hsqldb.write_delay=false;shutdown=true");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536");
props.put("hibernate.connection.username", "sa");
props.put("hibernate.hbm2ddl.auto", "none");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");

View File

@ -1 +1 @@
db.version=10
db.version=0

View File

@ -1,18 +1,23 @@
SET IGNORECASE TRUE;
create memory table T_AUTHENTICATION_TOKEN ( AUT_ID_C varchar(36) not null, AUT_IDUSER_C varchar(36) not null, AUT_LONGLASTED_B bit not null, AUT_CREATIONDATE_D datetime not null, AUT_LASTCONNECTIONDATE_D datetime, primary key (AUT_ID_C) );
create memory table T_AUTHENTICATION_TOKEN ( AUT_ID_C varchar(36) not null, AUT_IDUSER_C varchar(36) not null, AUT_LONGLASTED_B bit not null, AUT_CREATIONDATE_D datetime not null, AUT_LASTCONNECTIONDATE_D datetime, AUT_IP_C varchar(45), AUT_UA_C varchar(1000), primary key (AUT_ID_C) );
create memory table T_BASE_FUNCTION ( BAF_ID_C varchar(20) not null, primary key (BAF_ID_C) );
create cached table T_FILE ( FIL_ID_C varchar(36) not null, FIL_IDDOC_C varchar(36) not null, FIL_MIMETYPE_C varchar(100) not null, FIL_CREATEDATE_D datetime, FIL_DELETEDATE_D datetime, primary key (FIL_ID_C) );
create cached table T_FILE ( FIL_ID_C varchar(36) not null, FIL_IDDOC_C varchar(36), FIL_IDUSER_C varchar(36) not null, FIL_MIMETYPE_C varchar(100) not null, FIL_CREATEDATE_D datetime, FIL_DELETEDATE_D datetime, FIL_ORDER_N int, FIL_CONTENT_C longvarchar, primary key (FIL_ID_C) );
create memory table T_CONFIG ( CFG_ID_C varchar(50) not null, CFG_VALUE_C varchar(250) not null, primary key (CFG_ID_C) );
create memory table T_LOCALE ( LOC_ID_C varchar(10) not null, primary key (LOC_ID_C) );
create cached table T_DOCUMENT ( DOC_ID_C varchar(36) not null, DOC_IDUSER_C varchar(36) not null, DOC_TITLE_C varchar(100) not null, DOC_DESCRIPTION_C varchar(4000), DOC_CREATEDATE_D datetime, DOC_DELETEDATE_D datetime, primary key (DOC_ID_C) );
create memory table T_USER ( USE_ID_C varchar(36) not null, USE_IDLOCALE_C varchar(10) not null, USE_IDROLE_C varchar(36) not null, USE_USERNAME_C varchar(50) not null, USE_PASSWORD_C varchar(60) not null, USE_EMAIL_C varchar(100) not null, USE_THEME_C varchar(100) not null, USE_FIRSTCONNECTION_B bit not null, USE_CREATEDATE_D datetime not null, USE_DELETEDATE_D datetime, primary key (USE_ID_C) );
create cached table T_DOCUMENT ( DOC_ID_C varchar(36) not null, DOC_IDUSER_C varchar(36) not null, DOC_TITLE_C varchar(100) not null, DOC_DESCRIPTION_C varchar(4000), DOC_CREATEDATE_D datetime, DOC_DELETEDATE_D datetime, DOC_LANGUAGE_C varchar(3) default 'fra' not null, primary key (DOC_ID_C) );
create memory table T_USER ( USE_ID_C varchar(36) not null, USE_IDLOCALE_C varchar(10) not null, USE_IDROLE_C varchar(36) not null, USE_USERNAME_C varchar(50) not null, USE_PASSWORD_C varchar(60) not null, USE_EMAIL_C varchar(100) not null, USE_THEME_C varchar(100) not null, USE_FIRSTCONNECTION_B bit not null, USE_CREATEDATE_D datetime not null, USE_DELETEDATE_D datetime, USE_PRIVATEKEY_C varchar(100) default '' not null, primary key (USE_ID_C) );
create memory table T_ROLE ( ROL_ID_C varchar(36) not null, ROL_NAME_C varchar(36) not null, ROL_CREATEDATE_D datetime not null, ROL_DELETEDATE_D datetime, primary key (ROL_ID_C) );
create memory table T_ROLE_BASE_FUNCTION ( RBF_ID_C varchar(36) not null, RBF_IDROLE_C varchar(36) not null, RBF_IDBASEFUNCTION_C varchar(20) not null, RBF_CREATEDATE_D datetime not null, RBF_DELETEDATE_D datetime, primary key (RBF_ID_C) );
create cached table T_TAG ( TAG_ID_C varchar(36) not null, TAG_IDUSER_C varchar(36) not null, TAG_NAME_C varchar(36) not null, TAG_CREATEDATE_D datetime, TAG_DELETEDATE_D datetime, primary key (TAG_ID_C) );
create cached table T_DOCUMENT_TAG ( DOT_ID_C varchar(36) not null, DOT_IDDOCUMENT_C varchar(36) not null, DOT_IDTAG_C varchar(36) not null, primary key (DOT_ID_C) );
create cached table T_TAG ( TAG_ID_C varchar(36) not null, TAG_IDUSER_C varchar(36) not null, TAG_NAME_C varchar(36) not null, TAG_CREATEDATE_D datetime, TAG_DELETEDATE_D datetime, TAG_COLOR_C varchar(7) default '#3a87ad' not null, primary key (TAG_ID_C) );
create cached table T_DOCUMENT_TAG ( DOT_ID_C varchar(36) not null, DOT_IDDOCUMENT_C varchar(36) not null, DOT_IDTAG_C varchar(36) not null, DOT_DELETEDATE_D datetime, primary key (DOT_ID_C) );
create cached table T_ACL ( ACL_ID_C varchar(36) not null, ACL_PERM_C varchar(30) not null, ACL_SOURCEID_C varchar(36) not null, ACL_TARGETID_C varchar(36) not null, ACL_DELETEDATE_D datetime, primary key (ACL_ID_C) );
create cached table T_SHARE ( SHA_ID_C varchar(36) not null, SHA_NAME_C varchar(36), SHA_CREATEDATE_D datetime, SHA_DELETEDATE_D datetime, primary key (SHA_ID_C) );
create cached table T_AUDIT_LOG ( LOG_ID_C varchar(36) not null, LOG_IDENTITY_C varchar(36) not null, LOG_CLASSENTITY_C varchar(50) not null, LOG_TYPE_C varchar(50) not null, LOG_MESSAGE_C varchar(1000), LOG_CREATEDATE_D datetime, primary key (LOG_ID_C) );
alter table T_AUTHENTICATION_TOKEN add constraint FK_AUT_IDUSER_C foreign key (AUT_IDUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict;
alter table T_DOCUMENT add constraint FK_DOC_IDUSER_C foreign key (DOC_IDUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict;
alter table T_FILE add constraint FK_FIL_IDDOC_C foreign key (FIL_IDDOC_C) references T_DOCUMENT (DOC_ID_C) on delete restrict on update restrict;
alter table T_FILE add constraint FK_FIL_IDUSER_C foreign key (FIL_IDUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict;
alter table T_USER add constraint FK_USE_IDLOCALE_C foreign key (USE_IDLOCALE_C) references T_LOCALE (LOC_ID_C) on delete restrict on update restrict;
alter table T_USER add constraint FK_USE_IDROLE_C foreign key (USE_IDROLE_C) references T_ROLE (ROL_ID_C) on delete restrict on update restrict;
alter table T_TAG add constraint FK_TAG_IDUSER_C foreign key (TAG_IDUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict;
@ -20,11 +25,20 @@ alter table T_DOCUMENT_TAG add constraint FK_DOT_IDDOCUMENT_C foreign key (DOT_I
alter table T_DOCUMENT_TAG add constraint FK_DOT_IDTAG_C foreign key (DOT_IDTAG_C) references T_TAG (TAG_ID_C) on delete restrict on update restrict;
alter table T_ROLE_BASE_FUNCTION add constraint FK_RBF_IDROLE_C foreign key (RBF_IDROLE_C) references T_ROLE (ROL_ID_C) on delete restrict on update restrict;
alter table T_ROLE_BASE_FUNCTION add constraint FK_RBF_IDBASEFUNCTION_C foreign key (RBF_IDBASEFUNCTION_C) references T_BASE_FUNCTION (BAF_ID_C) on delete restrict on update restrict;
create index IDX_DOC_TITLE_C on T_DOCUMENT (DOC_TITLE_C);
create index IDX_DOC_CREATEDATE_D on T_DOCUMENT (DOC_CREATEDATE_D);
create index IDX_DOC_LANGUAGE_C on T_DOCUMENT (DOC_LANGUAGE_C);
create index IDX_ACL_SOURCEID_C on T_ACL (ACL_SOURCEID_C);
create index IDX_ACL_TARGETID_C on T_ACL (ACL_TARGETID_C);
create index IDX_LOG_IDENTITY_C on T_AUDIT_LOG (LOG_IDENTITY_C);
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('DB_VERSION', '0');
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('LUCENE_DIRECTORY_STORAGE', 'FILE');
insert into T_BASE_FUNCTION(BAF_ID_C) values('ADMIN');
insert into T_LOCALE(LOC_ID_C) values('en');
insert into T_LOCALE(LOC_ID_C) values('fr');
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('admin', 'Admin', NOW());
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('user', 'User', NOW());
insert into T_ROLE_BASE_FUNCTION(RBF_ID_C, RBF_IDROLE_C, RBF_IDBASEFUNCTION_C, RBF_CREATEDATE_D) values('admin_ADMIN', 'admin', 'ADMIN', NOW());
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D) values('admin', 'en', 'admin', 'admin', '$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C', 'admin@localhost', 'default.less', true, NOW());
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D, USE_PRIVATEKEY_C) values('admin', 'en', 'admin', 'admin', '$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C', 'admin@localhost', 'default.less', true, NOW(), 'AdminPk');
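
The seed script above stores the admin user's password as a bcrypt hash, and the parent pom declares org.mindrot.jbcrypt for exactly this. A small hedged sketch of verifying a candidate password against that stored hash; defaulting the candidate to 'admin' is an assumption for illustration only.

import org.mindrot.jbcrypt.BCrypt;

public class SeedPasswordCheck {
    public static void main(String[] args) {
        // Hash copied from the T_USER insert above (USE_PASSWORD_C of the 'admin' user)
        String storedHash = "$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C";
        String candidate = args.length > 0 ? args[0] : "admin"; // assumed default; pass another value to test
        System.out.println(BCrypt.checkpw(candidate, storedHash) ? "password matches" : "no match");
    }
}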

View File

@ -1,2 +0,0 @@
alter table T_FILE add column FIL_ORDER_N int;
update T_CONFIG set CFG_VALUE_C='1' where CFG_ID_C='DB_VERSION';

View File

@ -1,2 +0,0 @@
alter table T_TAG add column TAG_COLOR_C varchar(7) default '#3a87ad' not null;
update T_CONFIG set CFG_VALUE_C='2' where CFG_ID_C='DB_VERSION';

View File

@ -1,2 +0,0 @@
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('user', 'User', NOW());
update T_CONFIG set CFG_VALUE_C='3' where CFG_ID_C='DB_VERSION';

View File

@ -1,2 +0,0 @@
create cached table T_SHARE ( SHA_ID_C varchar(36) not null, SHA_NAME_C varchar(36), SHA_IDDOCUMENT_C varchar(36) not null, SHA_CREATEDATE_D datetime, SHA_DELETEDATE_D datetime, primary key (SHA_ID_C) );
update T_CONFIG set CFG_VALUE_C='4' where CFG_ID_C='DB_VERSION';

View File

@ -1,3 +0,0 @@
alter table T_FILE add column FIL_CONTENT_C LONGVARCHAR;
alter table T_DOCUMENT add column DOC_LANGUAGE_C varchar(3) default 'fra' not null;
update T_CONFIG set CFG_VALUE_C='5' where CFG_ID_C='DB_VERSION';

View File

@ -1,3 +0,0 @@
alter table T_USER add column USE_PRIVATEKEY_C varchar(100) default '' not null;
update T_USER set USE_PRIVATEKEY_C = 'AdminPk' where USE_ID_C = 'admin';
update T_CONFIG set CFG_VALUE_C='6' where CFG_ID_C='DB_VERSION';

View File

@ -1,3 +0,0 @@
alter table T_FILE alter column FIL_IDDOC_C set null;
alter table T_FILE add column FIL_IDUSER_C varchar(36);
update T_CONFIG set CFG_VALUE_C='7' where CFG_ID_C='DB_VERSION';

View File

@ -1,4 +0,0 @@
create cached table T_ACL ( ACL_ID_C varchar(36) not null, ACL_PERM_C varchar(30) not null, ACL_SOURCEID_C varchar(36) not null, ACL_TARGETID_C varchar(36) not null, ACL_DELETEDATE_D datetime, primary key (ACL_ID_C) );
drop table T_SHARE;
create cached table T_SHARE ( SHA_ID_C varchar(36) not null, SHA_NAME_C varchar(36), SHA_CREATEDATE_D datetime, SHA_DELETEDATE_D datetime, primary key (SHA_ID_C) );
update T_CONFIG set CFG_VALUE_C='8' where CFG_ID_C='DB_VERSION';

View File

@ -1,2 +0,0 @@
create index IDX_ACL_COMPOSITE on T_ACL (ACL_SOURCEID_C, ACL_TARGETID_C, ACL_PERM_C, ACL_DELETEDATE_D);
update T_CONFIG set CFG_VALUE_C='9' where CFG_ID_C='DB_VERSION';

View File

@ -1,7 +0,0 @@
alter table T_FILE alter column FIL_IDUSER_C set not null;
alter table T_AUTHENTICATION_TOKEN add column AUT_IP_C varchar(45);
alter table T_AUTHENTICATION_TOKEN add column AUT_UA_C varchar(1000);
create cached table T_AUDIT_LOG ( LOG_ID_C varchar(36) not null, LOG_IDENTITY_C varchar(36) not null, LOG_CLASSENTITY_C varchar(50) not null, LOG_TYPE_C varchar(50) not null, LOG_MESSAGE_C varchar(1000), LOG_CREATEDATE_D datetime, primary key (LOG_ID_C) );
create index IDX_LOG_COMPOSITE on T_AUDIT_LOG (LOG_IDENTITY_C, LOG_CLASSENTITY_C);
alter table T_DOCUMENT_TAG add column DOT_DELETEDATE_D datetime;
update T_CONFIG set CFG_VALUE_C='10' where CFG_ID_C='DB_VERSION';
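
All incremental dbupdate scripts (versions 1 through 10) above are removed: their changes are folded into the new dbupdate-000-0.sql, and db.version is reset to 0 in the module config files, so a fresh H2 database starts directly at version 0. Below is a sketch of the usual version-gate such scripts rely on; the class and method names are illustrative, not the project's actual upgrade code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class DbVersionGateSketch {
    // Reads the schema version written by the seed script (T_CONFIG key DB_VERSION).
    static int storedDbVersion(Connection c) throws SQLException {
        try (PreparedStatement ps = c.prepareStatement(
                "select CFG_VALUE_C from T_CONFIG where CFG_ID_C = 'DB_VERSION'");
             ResultSet rs = ps.executeQuery()) {
            return rs.next() ? Integer.parseInt(rs.getString(1)) : -1;
        }
    }

    // Illustrative gate: incremental dbupdate-<n> scripts would only run while the stored
    // version lags behind the db.version packaged with the application.
    static boolean needsUpgrade(Connection c, int packagedDbVersion) throws SQLException {
        return storedDbVersion(c) < packagedDbVersion;
    }
}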

View File

@ -1,5 +1,5 @@
hibernate.connection.driver_class=org.hsqldb.jdbcDriver
hibernate.connection.url=jdbc:hsqldb:mem:docs
hibernate.connection.driver_class=org.h2.Driver
hibernate.connection.url=jdbc:h2:mem:docs
hibernate.connection.username=sa
hibernate.connection.password=
hibernate.hbm2ddl.auto=none

View File

@ -24,7 +24,7 @@
<org.slf4j.version>1.6.4</org.slf4j.version>
<org.slf4j.jcl-over-slf4j.version>1.6.6</org.slf4j.jcl-over-slf4j.version>
<junit.junit.version>4.7</junit.junit.version>
<org.hsqldb.hsqldb.version>2.3.0</org.hsqldb.hsqldb.version>
<com.h2database.h2.version>1.4.188</com.h2database.h2.version>
<com.sun.jersey.version>1.17</com.sun.jersey.version>
<org.mindrot.jbcrypt>0.3m</org.mindrot.jbcrypt>
<org.subethamail.subethasmtp.version>3.1.6</org.subethamail.subethasmtp.version>
@ -35,6 +35,8 @@
<org.apache.lucene.version>4.2.0</org.apache.lucene.version>
<jgoodies.forms.version>1.0.5</jgoodies.forms.version>
<org.imgscalr.imgscalr-lib.version>4.2</org.imgscalr.imgscalr-lib.version>
<org.apache.pdfbox.pdfbox.version>1.8.10</org.apache.pdfbox.pdfbox.version>
<org.bouncycastle.bcprov-jdk15on.version>1.49</org.bouncycastle.bcprov-jdk15on.version>
<com.sun.grizzly.version>1.9.18-m</com.sun.grizzly.version>
<org.hibernate.hibernate.version>4.1.0.Final</org.hibernate.hibernate.version>
@ -62,8 +64,6 @@
<org.mortbay.jetty.jetty-maven-plugin.version>8.1.2.v20120308</org.mortbay.jetty.jetty-maven-plugin.version>
<org.vafer.jdeb.version>1.0.1</org.vafer.jdeb.version>
<com.samaxes.maven.minify-maven-plugin.version>1.7</com.samaxes.maven.minify-maven-plugin.version>
<org.apache.pdfbox.pdfbox.version>1.8.8</org.apache.pdfbox.pdfbox.version>
<org.bouncycastle.bcprov-jdk15on.version>1.49</org.bouncycastle.bcprov-jdk15on.version>
</properties>
<scm>
@ -318,6 +318,12 @@
<version>${com.sun.jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>${com.sun.jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.grizzly</groupId>
<artifactId>grizzly-servlet-webserver</artifactId>
@ -331,9 +337,9 @@
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>${org.hsqldb.hsqldb.version}</version>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${com.h2database.h2.version}</version>
</dependency>
<dependency>

docs-stress/pom.xml Normal file
View File

@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.0-SNAPSHOT</version>
<relativePath>../docs-parent</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>docs-stress</artifactId>
<packaging>jar</packaging>
<name>Docs Stress</name>
<dependencies>
<!-- Dependencies to Jersey -->
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<!-- Depenedencies to Docs -->
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
<type>test-jar</type>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
</resources>
</build>
</project>

View File

@ -0,0 +1,140 @@
package com.sismics.docs.stress;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import javax.ws.rs.core.MediaType;
import junit.framework.Assert;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.sismics.docs.rest.filter.CookieAuthenticationFilter;
import com.sismics.docs.rest.util.ClientUtil;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import com.sun.jersey.multipart.FormDataBodyPart;
import com.sun.jersey.multipart.FormDataMultiPart;
/**
* Stress app for Sismics Docs.
*
* @author bgamard
*/
public class Main {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(Main.class);
private static final String API_URL = "http://localhost:9999/docs-web/api/";
private static final int USER_COUNT = 50;
private static final int DOCUMENT_PER_USER_COUNT = 2000;
private static final int TAG_PER_USER_COUNT = 20;
private static final int FILE_PER_DOCUMENT_COUNT = 0;
private static Client client = Client.create();
private static ClientUtil clientUtil;
private static Set<User> userSet = Sets.newHashSet();
/**
* Entry point.
*
* @param args Args
*/
public static void main(String[] args) {
log.info("Starting stress test...");
WebResource resource = client.resource(API_URL);
clientUtil = new ClientUtil(resource);
// Create users
for (int i = 0; i < USER_COUNT; i++) {
String username = generateString();
clientUtil.createUser(username);
userSet.add(new User(username, (clientUtil.login(username))));
log.info("Created user " + (i + 1) + "/" + USER_COUNT);
}
// Create tags for each user
int tagCreatedCount = 1;
for (User user : userSet) {
WebResource tagResource = resource.path("/tag");
tagResource.addFilter(new CookieAuthenticationFilter(user.authToken));
for (int j = 0; j < TAG_PER_USER_COUNT; j++) {
MultivaluedMapImpl postParams = new MultivaluedMapImpl();
String name = generateString();
postParams.add("name", name);
postParams.add("color", "#ff0000");
ClientResponse response = tagResource.put(ClientResponse.class, postParams);
JSONObject json = response.getEntity(JSONObject.class);
user.tagList.add(json.optString("id"));
log.info("Created tag " + (tagCreatedCount++) + "/" + TAG_PER_USER_COUNT * USER_COUNT);
}
}
// Create documents for each user
int documentCreatedCount = 1;
for (User user : userSet) {
for (int i = 0; i < DOCUMENT_PER_USER_COUNT; i++) {
WebResource documentResource = resource.path("/document");
documentResource.addFilter(new CookieAuthenticationFilter(user.authToken));
MultivaluedMapImpl postParams = new MultivaluedMapImpl();
postParams.add("title", generateString());
postParams.add("description", generateString());
postParams.add("tags", user.tagList.get(ThreadLocalRandom.current().nextInt(user.tagList.size()))); // Random tag
postParams.add("language", "eng");
long createDate = new Date().getTime();
postParams.add("create_date", createDate);
ClientResponse response = documentResource.put(ClientResponse.class, postParams);
JSONObject json = response.getEntity(JSONObject.class);
String documentId = json.optString("id");
log.info("Created document " + (documentCreatedCount++) + "/" + DOCUMENT_PER_USER_COUNT * USER_COUNT + " for user: " + user.username);
// Add files for each document
for (int j = 0; j < FILE_PER_DOCUMENT_COUNT; j++) {
WebResource fileResource = resource.path("/file");
fileResource.addFilter(new CookieAuthenticationFilter(user.authToken));
FormDataMultiPart form = new FormDataMultiPart();
InputStream file = Main.class.getResourceAsStream("/empty.png");
FormDataBodyPart fdp = new FormDataBodyPart("file",
new BufferedInputStream(file),
MediaType.APPLICATION_OCTET_STREAM_TYPE);
form.bodyPart(fdp);
form.field("id", documentId);
response = fileResource.type(MediaType.MULTIPART_FORM_DATA).put(ClientResponse.class, form);
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
}
}
}
}
private static String generateString() {
return UUID.randomUUID().toString().replace("-", "");
}
private static class User {
public String username;
public List<String> tagList = Lists.newArrayList();
public String authToken;
public User(String username, String authToken) {
this.username = username;
this.authToken = authToken;
}
}
}

Binary file not shown.


View File

@ -0,0 +1,6 @@
log4j.rootCategory=WARN, CONSOLE
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{DATE} %p %l %m %n
log4j.logger.com.sismics=DEBUG

View File

@ -97,12 +97,6 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.subethamail</groupId>
<artifactId>subethasmtp-wiser</artifactId>

View File

@ -1,26 +1,34 @@
package com.sismics.util.filter;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Date;
import java.util.Locale;
import java.util.Set;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.joda.time.DateTimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.jpa.AuthenticationTokenDao;
import com.sismics.docs.core.dao.jpa.RoleBaseFunctionDao;
import com.sismics.docs.core.dao.jpa.UserDao;
import com.sismics.docs.core.model.jpa.AuthenticationToken;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.security.AnonymousPrincipal;
import com.sismics.security.UserPrincipal;
import com.sismics.util.LocaleUtil;
import org.joda.time.DateTimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.*;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Date;
import java.util.Locale;
import java.util.Set;
/**
* This filter is used to authenticate the user having an active session via an authentication token stored in database.
@ -110,6 +118,7 @@ public class TokenBasedSecurityFilter implements Filter {
// Update the last connection date
authenticationTokenDao.updateLastConnectionDate(authenticationToken.getId());
TransactionUtil.commit();
} else {
injectAnonymousUser(request);
}

View File

@ -85,8 +85,8 @@
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
</dependency>
<!-- Test dependencies -->
@ -152,37 +152,6 @@
</plugin>
</plugins>
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>com.samaxes.maven</groupId>
<artifactId>
minify-maven-plugin
</artifactId>
<versionRange>[1.5,)</versionRange>
<goals>
<goal>minify</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore />
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<profiles>
@ -234,6 +203,53 @@
</build>
</profile>
<!-- Stress profile -->
<profile>
<id>stress</id>
<activation>
<property>
<name>env</name>
<value>stress</value>
</property>
</activation>
<build>
<resources>
<resource>
<directory>src/stress/resources</directory>
<filtering>false</filtering>
<excludes>
<exclude>**/config.properties</exclude>
</excludes>
</resource>
<resource>
<directory>src/stress/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/config.properties</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<configuration>
<scanIntervalSeconds>0</scanIntervalSeconds>
<webAppConfig>
<contextPath>/docs-web</contextPath>
<extraClasspath>target/classes;../docs-core/target/classes</extraClasspath>
<overrideDescriptor>src/stress/main/webapp/web-override.xml</overrideDescriptor>
</webAppConfig>
<stopKey>STOPKEY</stopKey>
<stopPort>1099</stopPort>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<!-- Production profile -->
<profile>
<id>prod</id>

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=10
db.version=0

View File

@ -1,5 +1,5 @@
hibernate.connection.driver_class=org.hsqldb.jdbcDriver
hibernate.connection.url=jdbc:hsqldb:mem:docs
hibernate.connection.driver_class=org.h2.Driver
hibernate.connection.url=jdbc:h2:mem:docs
hibernate.connection.username=sa
hibernate.connection.password=
hibernate.hbm2ddl.auto=none

View File

@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=10
db.version=0

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app id="docs"
xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
version="3.0">
<!-- Override init parameter to avoid nasty file locking issue on windows. -->
<servlet>
<servlet-name>default</servlet-name>
<init-param>
<param-name>useFileMappedBuffer</param-name>
<param-value>false</param-value>
</init-param>
</servlet>
</web-app>

View File

@ -0,0 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=0

View File

@ -0,0 +1 @@
\ugggg

View File

@ -0,0 +1,8 @@
log4j.rootCategory=WARN, CONSOLE, MEMORY
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{DATE} %p %l %m %n
log4j.appender.MEMORY=com.sismics.util.log4j.MemoryAppender
log4j.appender.MEMORY.size=1000
log4j.logger.com.sismics=DEBUG

View File

@ -1,5 +1,5 @@
hibernate.connection.driver_class=org.hsqldb.jdbcDriver
hibernate.connection.url=jdbc:hsqldb:mem:docs;get_column_name=false
hibernate.connection.driver_class=org.h2.Driver
hibernate.connection.url=jdbc:h2:mem:docs
hibernate.connection.username=sa
hibernate.connection.password=
hibernate.hbm2ddl.auto=none