Closes #114: PostgreSQL compatibility

Benjamin Gamard 2018-03-21 18:58:50 +01:00
parent e5f85c931c
commit 3821a15f9d
11 changed files with 153 additions and 93 deletions

View File

@@ -174,6 +174,12 @@
            <artifactId>jai-imageio-jpeg2000</artifactId>
        </dependency>
+        <dependency>
+            <groupId>org.postgresql</groupId>
+            <artifactId>postgresql</artifactId>
+            <version>42.2.2.jre7</version>
+        </dependency>
        <!-- Test dependencies -->
        <dependency>
            <groupId>junit</groupId>
@@ -214,20 +220,6 @@
        <profile>
            <id>prod</id>
        </profile>
-        <!-- Demo profile -->
-        <profile>
-            <id>demo</id>
-            <build>
-                <resources>
-                    <resource>
-                        <directory>src/demo/resources</directory>
-                        <filtering>true</filtering>
-                    </resource>
-                </resources>
-            </build>
-        </profile>
    </profiles>
    <build>

View File

@@ -86,7 +86,7 @@ public class AuthenticationTokenDao {
     */
    public void updateLastConnectionDate(String id) {
        StringBuilder sb = new StringBuilder("update T_AUTHENTICATION_TOKEN ato ");
-        sb.append(" set ato.AUT_LASTCONNECTIONDATE_D = :currentDate ");
+        sb.append(" set AUT_LASTCONNECTIONDATE_D = :currentDate ");
        sb.append(" where ato.AUT_ID_C = :id");
        EntityManager em = ThreadLocalContext.get().getEntityManager();

View File

@@ -92,8 +92,8 @@ public class DocumentDao {
        EntityManager em = ThreadLocalContext.get().getEntityManager();
        StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C, d.DOC_TITLE_C, d.DOC_DESCRIPTION_C, d.DOC_SUBJECT_C, d.DOC_IDENTIFIER_C, d.DOC_PUBLISHER_C, d.DOC_FORMAT_C, d.DOC_SOURCE_C, d.DOC_TYPE_C, d.DOC_COVERAGE_C, d.DOC_RIGHTS_C, d.DOC_CREATEDATE_D, d.DOC_UPDATEDATE_D, d.DOC_LANGUAGE_C, ");
-        sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null), ");
-        sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C), ");
+        sb.append(" (select count(s.SHA_ID_C) from T_SHARE s, T_ACL ac where ac.ACL_SOURCEID_C = d.DOC_ID_C and ac.ACL_TARGETID_C = s.SHA_ID_C and ac.ACL_DELETEDATE_D is null and s.SHA_DELETEDATE_D is null) shareCount, ");
+        sb.append(" (select count(f.FIL_ID_C) from T_FILE f where f.FIL_DELETEDATE_D is null and f.FIL_IDDOC_C = d.DOC_ID_C) fileCount, ");
        sb.append(" u.USE_USERNAME_C ");
        sb.append(" from T_DOCUMENT d ");
        sb.append(" join T_USER u on d.DOC_IDUSER_C = u.USE_ID_C ");

View File

@@ -95,12 +95,12 @@ public class RouteDao {
    public void deleteRoute(String routeId) {
        EntityManager em = ThreadLocalContext.get().getEntityManager();
-        em.createNativeQuery("update T_ROUTE_STEP rs set rs.RTP_DELETEDATE_D = :dateNow where rs.RTP_IDROUTE_C = :routeId and rs.RTP_DELETEDATE_D is null")
+        em.createNativeQuery("update T_ROUTE_STEP rs set RTP_DELETEDATE_D = :dateNow where rs.RTP_IDROUTE_C = :routeId and rs.RTP_DELETEDATE_D is null")
                .setParameter("routeId", routeId)
                .setParameter("dateNow", new Date())
                .executeUpdate();
-        em.createNativeQuery("update T_ROUTE r set r.RTE_DELETEDATE_D = :dateNow where r.RTE_ID_C = :routeId and r.RTE_DELETEDATE_D is null")
+        em.createNativeQuery("update T_ROUTE r set RTE_DELETEDATE_D = :dateNow where r.RTE_ID_C = :routeId and r.RTE_DELETEDATE_D is null")
                .setParameter("routeId", routeId)
                .setParameter("dateNow", new Date())
                .executeUpdate();

View File

@@ -141,7 +141,7 @@ public class RouteStepDao {
     */
    public void endRouteStep(String id, RouteStepTransition transition, String comment, String validatorUserId) {
        StringBuilder sb = new StringBuilder("update T_ROUTE_STEP r ");
-        sb.append(" set r.RTP_ENDDATE_D = :endDate, r.RTP_TRANSITION_C = :transition, r.RTP_COMMENT_C = :comment, r.RTP_IDVALIDATORUSER_C = :validatorUserId ");
+        sb.append(" set RTP_ENDDATE_D = :endDate, RTP_TRANSITION_C = :transition, RTP_COMMENT_C = :comment, RTP_IDVALIDATORUSER_C = :validatorUserId ");
        sb.append(" where r.RTP_ID_C = :id");
        EntityManager em = ThreadLocalContext.get().getEntityManager();

View File

@@ -1,7 +1,19 @@
package com.sismics.util.jpa;

-import java.io.File;
-import java.io.FilenameFilter;
+import com.google.common.base.Strings;
+import com.google.common.io.CharStreams;
+import com.sismics.docs.core.util.ConfigUtil;
+import com.sismics.util.ResourceUtil;
+import org.hibernate.HibernateException;
+import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
+import org.hibernate.engine.jdbc.internal.FormatStyle;
+import org.hibernate.engine.jdbc.internal.Formatter;
+import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
+import org.hibernate.service.ServiceRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -15,22 +27,6 @@ import java.util.Collections;
import java.util.List;
import java.util.ResourceBundle;

-import org.hibernate.HibernateException;
-import org.hibernate.JDBCException;
-import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
-import org.hibernate.engine.jdbc.internal.FormatStyle;
-import org.hibernate.engine.jdbc.internal.Formatter;
-import org.hibernate.engine.jdbc.spi.JdbcServices;
-import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
-import org.hibernate.service.ServiceRegistry;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Strings;
-import com.google.common.io.CharStreams;
-import com.sismics.docs.core.util.ConfigUtil;
-import com.sismics.util.ResourceUtil;

/**
 * A helper to update the database incrementally.
 *
@@ -48,8 +44,6 @@ abstract class DbOpenHelper {
    private Formatter formatter;

-    private boolean haltOnError;
-
    private Statement stmt;

    DbOpenHelper(ServiceRegistry serviceRegistry) throws HibernateException {
@@ -85,12 +79,13 @@ abstract class DbOpenHelper {
                    oldVersion = Integer.parseInt(oldVersionStr);
                }
            } catch (Exception e) {
-                if (e.getMessage().contains("not found")) {
+                if (DialectUtil.isObjectNotFound(e.getMessage())) {
                    log.info("Unable to get database version: Table T_CONFIG not found");
                } else {
                    log.error("Unable to get database version", e);
                }
            } finally {
+                connection.commit();
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
@@ -133,15 +128,12 @@ abstract class DbOpenHelper {
     * Execute all upgrade scripts in ascending order for a given version.
     *
     * @param version Version number
-     * @throws Exception
+     * @throws Exception e
     */
    void executeAllScript(final int version) throws Exception {
-        List<String> fileNameList = ResourceUtil.list(getClass(), "/db/update/", new FilenameFilter() {
-            @Override
-            public boolean accept(File dir, String name) {
-                String versionString = String.format("%03d", version);
-                return name.matches("dbupdate-" + versionString + "-\\d+\\.sql");
-            }
+        List<String> fileNameList = ResourceUtil.list(getClass(), "/db/update/", (dir, name) -> {
+            String versionString = String.format("%03d", version);
+            return name.matches("dbupdate-" + versionString + "-\\d+\\.sql");
        });
        Collections.sort(fileNameList);
@@ -158,10 +150,9 @@ abstract class DbOpenHelper {
     * Execute a SQL script. All statements must be one line only.
     *
     * @param inputScript Script to execute
-     * @throws IOException
-     * @throws SQLException
+     * @throws IOException e
     */
-    void executeScript(InputStream inputScript) throws IOException, SQLException {
+    private void executeScript(InputStream inputScript) throws IOException {
        List<String> lines = CharStreams.readLines(new InputStreamReader(inputScript));

        for (String sql : lines) {
@@ -169,22 +160,18 @@ abstract class DbOpenHelper {
                continue;
            }

-            String formatted = formatter.format(sql);
-            try {
-                log.debug(formatted);
-                stmt.executeUpdate(formatted);
-            } catch (SQLException e) {
-                if (haltOnError) {
-                    if (stmt != null) {
-                        stmt.close();
-                        stmt = null;
-                    }
-                    throw new JDBCException("Error during script execution", e);
-                }
-                exceptions.add(e);
-                if (log.isErrorEnabled()) {
-                    log.error("Error executing SQL statement: {}", sql);
-                    log.error(e.getMessage());
+            String transformed = DialectUtil.transform(sql);
+            if (transformed != null) {
+                String formatted = formatter.format(transformed);
+                try {
+                    log.debug(formatted);
+                    stmt.executeUpdate(formatted);
+                } catch (SQLException e) {
+                    exceptions.add(e);
+                    if (log.isErrorEnabled()) {
+                        log.error("Error executing SQL statement: {}", sql);
+                        log.error(e.getMessage());
+                    }
                }
            }
        }
@@ -203,10 +190,6 @@ abstract class DbOpenHelper {
        return exceptions;
    }

-    public void setHaltOnError(boolean haltOnError) {
-        this.haltOnError = haltOnError;
-    }
-
    /**
     * Format the output SQL statements.
     *

View File

@@ -0,0 +1,55 @@
package com.sismics.util.jpa;

/**
 * Dialect utilities.
 *
 * @author jtremeaux
 */
public class DialectUtil {
    /**
     * Checks if the error from the drivers relates to an object not found.
     *
     * @param message Error message
     * @return Object not found
     */
    public static boolean isObjectNotFound(String message) {
        return EMF.isDriverH2() && message.contains("object not found") ||
                EMF.isDriverPostgresql() && message.contains("does not exist");
    }

    /**
     * Transform SQL dialect to current dialect.
     *
     * @param sql SQL to transform
     * @return Transformed SQL
     */
    public static String transform(String sql) {
        if (sql.startsWith("!PGSQL!")) {
            return EMF.isDriverH2() ? null : sql.substring(7);
        }
        if (sql.startsWith("!H2!")) {
            return EMF.isDriverPostgresql() ? null : sql.substring(4);
        }
        if (EMF.isDriverPostgresql()) {
            sql = transformToPostgresql(sql);
        }
        return sql;
    }

    /**
     * Transform SQL from HSQLDB dialect to current dialect.
     *
     * @param sql SQL to transform
     * @return Transformed SQL
     */
    public static String transformToPostgresql(String sql) {
        sql = sql.replaceAll("(cached|memory) table", "table");
        sql = sql.replaceAll("datetime", "timestamp");
        sql = sql.replaceAll("longvarchar", "text");
        sql = sql.replaceAll("bit not null", "bool not null");
        sql = sql.replaceAll("bit default 0", "bool default false");
        return sql;
    }
}
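
Note: the following sketch is not part of the commit. It restates the HSQLDB-to-PostgreSQL rewrite rules from transformToPostgresql() as a standalone, runnable example (so it does not need EMF or a live database), to show how an H2-dialect DDL statement ends up when the PostgreSQL driver is active. The class name DialectExample and the shortened T_FILE definition are illustrative only.

public class DialectExample {
    // Same replacement rules as DialectUtil.transformToPostgresql(), copied here for illustration.
    static String toPostgresql(String sql) {
        sql = sql.replaceAll("(cached|memory) table", "table");
        sql = sql.replaceAll("datetime", "timestamp");
        sql = sql.replaceAll("longvarchar", "text");
        sql = sql.replaceAll("bit not null", "bool not null");
        sql = sql.replaceAll("bit default 0", "bool default false");
        return sql;
    }

    public static void main(String[] args) {
        // Shortened HSQLDB-dialect DDL in the style of the base schema script further down in this diff.
        String h2 = "create cached table T_FILE ( FIL_ID_C varchar(36) not null, FIL_CREATEDATE_D datetime, FIL_CONTENT_C longvarchar, primary key (FIL_ID_C) );";
        // Prints: create table T_FILE ( FIL_ID_C varchar(36) not null, FIL_CREATEDATE_D timestamp, FIL_CONTENT_C text, primary key (FIL_ID_C) );
        System.out.println(toPostgresql(h2));
    }
}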

View File

@@ -1,16 +1,6 @@
package com.sismics.util.jpa;

-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URL;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.Persistence;
-
+import com.sismics.docs.core.util.DirectoryUtil;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.config.ConfigurationHelper;
@@ -18,7 +8,15 @@ import org.hibernate.service.ServiceRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import com.sismics.docs.core.util.DirectoryUtil;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;

/**
 * Entity manager factory.
@@ -28,18 +26,19 @@ import com.sismics.docs.core.util.DirectoryUtil;
public final class EMF {
    private static final Logger log = LoggerFactory.getLogger(EMF.class);

+    private static Map<Object, Object> properties;
+
    private static EntityManagerFactory emfInstance;

    static {
        try {
-            Map<Object, Object> properties = getEntityManagerProperties();
+            properties = getEntityManagerProperties();
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry reg = new StandardServiceRegistryBuilder().applySettings(properties).build();

            DbOpenHelper openHelper = new DbOpenHelper(reg) {
                @Override
                public void onCreate() throws Exception {
                    executeAllScript(0);
@@ -78,15 +77,27 @@ public final class EMF {
        }

        // Use environment parameters
+        String databaseUrl = System.getenv("DATABASE_URL");
+        String databaseUsername = System.getenv("DATABASE_USER");
+        String databasePassword = System.getenv("DATABASE_PASSWORD");
        log.info("Configuring EntityManager from environment parameters");
-        Map<Object, Object> props = new HashMap<Object, Object>();
-        props.put("hibernate.connection.driver_class", "org.h2.Driver");
+        Map<Object, Object> props = new HashMap<>();
        Path dbDirectory = DirectoryUtil.getDbDirectory();
        String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
-        props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536");
-        props.put("hibernate.connection.username", "sa");
+        if (databaseUrl == null) {
+            props.put("hibernate.connection.driver_class", "org.h2.Driver");
+            props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
+            props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536");
+            props.put("hibernate.connection.username", "sa");
+        } else {
+            props.put("hibernate.connection.driver_class", "org.postgresql.Driver");
+            props.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQL94Dialect");
+            props.put("hibernate.connection.url", databaseUrl);
+            props.put("hibernate.connection.username", databaseUsername);
+            props.put("hibernate.connection.password", databasePassword);
+        }
        props.put("hibernate.hbm2ddl.auto", "");
-        props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
        props.put("hibernate.show_sql", "false");
        props.put("hibernate.format_sql", "false");
        props.put("hibernate.max_fetch_depth", "5");
@@ -112,4 +123,18 @@
    public static EntityManagerFactory get() {
        return emfInstance;
    }
+
+    public static boolean isDriverH2() {
+        String driver = getDriver();
+        return driver.contains("h2");
+    }
+
+    public static boolean isDriverPostgresql() {
+        String driver = getDriver();
+        return driver.contains("postgresql");
+    }
+
+    public static String getDriver() {
+        return (String) properties.get("hibernate.connection.driver_class");
+    }
}
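
For context, database selection is now driven entirely by the environment: when DATABASE_URL is absent, the embedded H2 file database is used as before; when it is present, the PostgreSQL driver, dialect and credentials are taken from DATABASE_URL, DATABASE_USER and DATABASE_PASSWORD. A minimal sketch of that switch, not part of the commit; the class name and the example values in the comments are invented for illustration.

// Example environment for running against PostgreSQL (illustrative values only):
//   DATABASE_URL=jdbc:postgresql://localhost:5432/docs
//   DATABASE_USER=docs
//   DATABASE_PASSWORD=secret
public class DatabaseSelectionExample {
    public static void main(String[] args) {
        String databaseUrl = System.getenv("DATABASE_URL");
        // Mirrors the decision made when EMF builds its connection properties.
        System.out.println(databaseUrl == null
                ? "No DATABASE_URL set: using the embedded H2 file database"
                : "DATABASE_URL set: using PostgreSQL at " + databaseUrl);
    }
}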

View File

@@ -1,4 +1,4 @@
-SET IGNORECASE TRUE;
+!H2!SET IGNORECASE TRUE;
create memory table T_AUTHENTICATION_TOKEN ( AUT_ID_C varchar(36) not null, AUT_IDUSER_C varchar(36) not null, AUT_LONGLASTED_B bit not null, AUT_CREATIONDATE_D datetime not null, AUT_LASTCONNECTIONDATE_D datetime, AUT_IP_C varchar(45), AUT_UA_C varchar(1000), primary key (AUT_ID_C) );
create memory table T_BASE_FUNCTION ( BAF_ID_C varchar(20) not null, primary key (BAF_ID_C) );
create cached table T_FILE ( FIL_ID_C varchar(36) not null, FIL_IDDOC_C varchar(36), FIL_IDUSER_C varchar(36) not null, FIL_MIMETYPE_C varchar(100) not null, FIL_CREATEDATE_D datetime, FIL_DELETEDATE_D datetime, FIL_ORDER_N int, FIL_CONTENT_C longvarchar, primary key (FIL_ID_C) );

View File

@@ -1,2 +1,5 @@
-alter table T_DOCUMENT alter column DOC_LANGUAGE_C varchar(7) default 'eng' not null;
+!H2!alter table T_DOCUMENT alter column DOC_LANGUAGE_C varchar(7) default 'eng' not null;
+!PGSQL!alter table T_DOCUMENT alter column DOC_LANGUAGE_C type varchar(7);
+!PGSQL!alter table T_DOCUMENT alter column DOC_LANGUAGE_C set default 'eng';
+!PGSQL!alter table T_DOCUMENT alter column DOC_LANGUAGE_C set not null;
update T_CONFIG set CFG_VALUE_C = '12' where CFG_ID_C = 'DB_VERSION';

View File

@@ -1,5 +1,7 @@
alter table T_DOCUMENT add column DOC_UPDATEDATE_D datetime;
update T_DOCUMENT set DOC_UPDATEDATE_D = DOC_CREATEDATE_D;
-alter table T_DOCUMENT alter column DOC_UPDATEDATE_D datetime not null;
+!H2!alter table T_DOCUMENT alter column DOC_UPDATEDATE_D datetime not null;
+!PGSQL!alter table T_DOCUMENT alter column DOC_UPDATEDATE_D type timestamp;
+!PGSQL!alter table T_DOCUMENT alter column DOC_UPDATEDATE_D set not null;
alter table T_ROUTE_STEP add column RTP_TRANSITIONS_C varchar(2000);
update T_CONFIG set CFG_VALUE_C = '18' where CFG_ID_C = 'DB_VERSION';
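
Taken together, the upgrade scripts stay single-sourced: unprefixed statements run on both engines (after the rewrite rules shown earlier when PostgreSQL is active), !H2! statements run only on H2, and !PGSQL! statements run only on PostgreSQL, because DbOpenHelper skips any line for which DialectUtil.transform() returns null. A standalone restatement of that gating step, for illustration only; the class name PrefixGateExample and the boolean parameter are not part of the commit.

public class PrefixGateExample {
    // Simplified version of the prefix handling in DialectUtil.transform():
    // returns null when the statement must be skipped for the active driver.
    static String gate(String sql, boolean postgresql) {
        if (sql.startsWith("!PGSQL!")) {
            return postgresql ? sql.substring("!PGSQL!".length()) : null;
        }
        if (sql.startsWith("!H2!")) {
            return postgresql ? null : sql.substring("!H2!".length());
        }
        return sql; // shared statement, runs on both engines
    }

    public static void main(String[] args) {
        // On H2 the !PGSQL! line is skipped; on PostgreSQL the prefix is stripped and the statement runs.
        String line = "!PGSQL!alter table T_DOCUMENT alter column DOC_UPDATEDATE_D set not null;";
        System.out.println(gate(line, false)); // null (skipped on H2)
        System.out.println(gate(line, true));  // alter table T_DOCUMENT alter column DOC_UPDATEDATE_D set not null;
    }
}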