Merge remote-tracking branch 'origin/master'

Commit 7488ac15a7 by Jean-Marc Tremeaux, 2020-09-24 12:45:08 +02:00
219 changed files with 6874 additions and 1314 deletions

.github/FUNDING.yml (new file, 3 additions)

@@ -0,0 +1,3 @@
# These are supported funding model platforms
github: [jendib]

.gitignore (6 changes)

@@ -11,6 +11,6 @@
 *.iml
 node_modules
 import_test
-docs-importer-linux
-docs-importer-macos
-docs-importer-win.exe
+teedy-importer-linux
+teedy-importer-macos
+teedy-importer-win.exe


@@ -4,17 +4,27 @@ language: java
 before_install:
 - sudo add-apt-repository -y ppa:mc3man/trusty-media
 - sudo apt-get -qq update
-- sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra
+- sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun tesseract-ocr-fin tesseract-ocr-swe tesseract-ocr-lav tesseract-ocr-dan
 - sudo apt-get -y -q install haveged && sudo service haveged start
 after_success:
-- mvn -Pprod -DskipTests clean install
-- docker login -u $DOCKER_USER -p $DOCKER_PASS
-- export REPO=sismics/docs
-- export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
-- docker build -f Dockerfile -t $REPO:$COMMIT .
-- docker tag $REPO:$COMMIT $REPO:$TAG
-- docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
-- docker push $REPO
+- |
+  if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
+    mvn -Pprod -DskipTests clean install
+    docker login -u $DOCKER_USER -p $DOCKER_PASS
+    export REPO=sismics/docs
+    export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
+    docker build -f Dockerfile -t $REPO:$COMMIT .
+    docker tag $REPO:$COMMIT $REPO:$TAG
+    docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
+    docker push $REPO
+    cd docs-importer
+    export REPO=sismics/docs-importer
+    export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
+    docker build -f Dockerfile -t $REPO:$COMMIT .
+    docker tag $REPO:$COMMIT $REPO:$TAG
+    docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
+    docker push $REPO
+  fi
 env:
   global:
   - secure: LRGpjWORb0qy6VuypZjTAfA8uRHlFUMTwb77cenS9PPRBxuSnctC531asS9Xg3DqC5nsRxBBprgfCKotn5S8nBSD1ceHh84NASyzLSBft3xSMbg7f/2i7MQ+pGVwLncusBU6E/drnMFwZBleo+9M8Tf96axY5zuUp90MUTpSgt0=


@@ -1,7 +1,7 @@
 FROM sismics/ubuntu-jetty:9.4.12
 MAINTAINER b.gamard@sismics.com
-RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra && \
+RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun tesseract-ocr-fin tesseract-ocr-swe tesseract-ocr-lav tesseract-ocr-dan && \
     apt-get clean && rm -rf /var/lib/apt/lists/*
 # Remove the embedded javax.mail jar from Jetty
@@ -9,3 +9,5 @@ RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
 ADD docs.xml /opt/jetty/webapps/docs.xml
 ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
+ENV JAVA_OPTIONS -Xmx1g


@@ -1,27 +1,24 @@
 <h3 align="center">
-<img src="https://www.sismicsdocs.com/img/github-title.png" alt="Sismics Docs" width=500 />
+<img src="https://teedy.io/img/github-title.png" alt="Teedy" width=500 />
 </h3>
-[![Twitter: @sismicsdocs](https://img.shields.io/badge/contact-@sismicsdocs-blue.svg?style=flat)](https://twitter.com/sismicsdocs)
 [![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
 [![Build Status](https://secure.travis-ci.org/sismics/docs.png)](http://travis-ci.org/sismics/docs)
-Docs is an open source, lightweight document management system for individuals and businesses.
-**Discuss it on [Product Hunt](https://www.producthunt.com/posts/sismics-docs) 🦄**
+Teedy is an open source, lightweight document management system for individuals and businesses.
 <hr />
 <h2 align="center">
-We just launched a Cloud version of Sismics Docs! Head to <a href="https://www.sismicsdocs.com/">sismicsdocs.com</a> for more informations
+<a href="https://github.com/users/jendib/sponsorship">Sponsor this project if you use and appreciate it!</a>
 </h2>
 <hr />
-![New!](https://www.sismicsdocs.com/img/laptop-demo.png?20180301)
+![New!](https://teedy.io/img/laptop-demo.png?20180301)
 Demo
 ----
-A demo is available at [demo.sismicsdocs.com](https://demo.sismicsdocs.com)
+A demo is available at [demo.teedy.io](https://demo.teedy.io)
 - Guest login is enabled with read access on all documents
 - "admin" login with "admin" password
 - "demo" login with "password" password
@@ -31,16 +28,19 @@ Features
 - Responsive user interface
 - Optical character recognition
+- LDAP authentication ![New!](https://www.sismics.com/public/img/new.png)
 - Support image, PDF, ODT, DOCX, PPTX files
-- Video file support ![New!](https://www.sismics.com/public/img/new.png)
+- Video file support
 - Flexible search engine with suggestions and highlighting
 - Full text search in all supported files
 - All [Dublin Core](http://dublincore.org/) metadata
+- Custom user-defined metadata ![New!](https://www.sismics.com/public/img/new.png)
 - Workflow system ![New!](https://www.sismics.com/public/img/new.png)
 - 256-bit AES encryption of stored files
+- File versioning ![New!](https://www.sismics.com/public/img/new.png)
 - Tag system with nesting
-- Import document from email (EML format) ![New!](https://www.sismics.com/public/img/new.png)
-- Automatic inbox scanning and importing ![New!](https://www.sismics.com/public/img/new.png)
+- Import document from email (EML format)
+- Automatic inbox scanning and importing
 - User/group permission system
 - 2-factor authentication
 - Hierarchical groups
@@ -49,22 +49,23 @@ Features
 - Storage quota per user
 - Document sharing by URL
 - RESTful Web API
-- Webhooks to trigger external service ![New!](https://www.sismics.com/public/img/new.png)
+- Webhooks to trigger external service
 - Fully featured Android client
-- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode) ![New!](https://www.sismics.com/public/img/new.png)
+- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
 - Tested to one million documents
 Install with Docker
 -------------------
-From a Docker host, run this command to download and install Sismics Docs. The server will run on <http://[your-docker-host-ip]:8100>.
+A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
 **The default admin password is "admin". Don't forget to change it before going to production.**
+- Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
+- Latest stable version: `sismics/docs:v1.8`
-docker run --rm --name sismics_docs_latest -d -e DOCS_BASE_URL='http://[your-docker-host-ip]:8100' -p 8100:8080 -v sismics_docs_latest:/data sismics/docs:latest
-<img src="http://www.newdesignfile.com/postpic/2011/01/green-info-icon_206509.png" width="16px" height="16px"> **Note:** You will need to change [your-docker-host-ip] with the IP address or FQDN of your docker host e.g.
-FQDN: http://docs.sismics.com
-IP: http://192.168.100.10
+The data directory is `/data`. Don't forget to mount a volume on it.
+To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com)
 Manual installation
 -------------------
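For illustration only (not part of the diff): the new README text above describes the image (port 8080, data in `/data`, external URL taken from `DOCS_BASE_URL`) without showing a run command, while the old README's command is removed. A run command matching that description might look like the sketch below; the container name `teedy`, the host port 8100 and the volume name `teedy_data` are assumptions to adapt to your environment.

```bash
# Map host port 8100 to the container's 8080, persist /data in a named volume,
# and tell the server its externally visible base URL.
docker run -d --name teedy \
  -p 8100:8080 \
  -v teedy_data:/data \
  -e DOCS_BASE_URL='http://[your-docker-host-ip]:8100' \
  sismics/docs:v1.8
```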
@@ -80,12 +81,12 @@ Manual installation
 The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format.
 **The default admin password is "admin". Don't forget to change it before going to production.**
-How to build Docs from the sources
+How to build Teedy from the sources
 ----------------------------------
-Prerequisites: JDK 8 with JCE, Maven 3, Tesseract 3 or 4
+Prerequisites: JDK 8 with JCE, Maven 3, NPM, Grunt, Tesseract 3 or 4
-Docs is organized in several Maven modules:
+Teedy is organized in several Maven modules:
 - docs-core
 - docs-web
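As an aside (not part of the diff): the .travis.yml change earlier in this commit builds the project with Maven's `prod` profile, so a local build from the sources would plausibly use the same command; treat this as a sketch rather than the project's documented build step.

```bash
# Build all Maven modules; the prod profile packages the web UI into the WAR.
# -DskipTests only speeds up the build and can be dropped to run the test suite.
mvn -Pprod -DskipTests clean install
```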
@@ -121,19 +122,8 @@ All contributions are more than welcomed. Contributions may close an issue, fix
 The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there.
-Community
----------
-Get updates on Sismics Docs' development and chat with the project maintainers:
-- Follow [@sismicsdocs on Twitter](https://twitter.com/sismicsdocs)
-- Read and subscribe to [The Official Sismics Docs Blog](https://blog.sismicsdocs.com/)
-- Check the [Official Website](https://www.sismicsdocs.com)
-- Join us [on Facebook](https://www.facebook.com/sismicsdocs)
 License
 -------
-Docs is released under the terms of the GPL license. See `COPYING` for more
+Teedy is released under the terms of the GPL license. See `COPYING` for more
 information or see <http://opensource.org/licenses/GPL-2.0>.


@@ -4,7 +4,7 @@ buildscript {
         google()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.2.1'
+        classpath 'com.android.tools.build:gradle:3.4.0'
     }
 }
 apply plugin: 'com.android.application'


@@ -8,13 +8,14 @@
     <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.WAKE_LOCK" />
+    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
     <application
         android:name=".MainApplication"
         android:allowBackup="true"
         android:icon="@mipmap/ic_launcher"
         android:label="@string/app_name"
-        android:theme="@style/AppTheme" >
+        android:theme="@style/AppTheme">
         <activity
             android:name=".activity.LoginActivity"
             android:label="@string/app_name"


@@ -1,6 +1,7 @@
 package com.sismics.docs;
 import android.app.Application;
+import android.support.v7.app.AppCompatDelegate;
 import com.sismics.docs.model.application.ApplicationContext;
 import com.sismics.docs.util.PreferenceUtil;
@@ -22,5 +23,7 @@ public class MainApplication extends Application {
         // TODO Provide documents to intent action get content
         super.onCreate();
+        AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_NO);
     }
 }


@@ -52,7 +52,7 @@ public class AuditLogActivity extends AppCompatActivity {
         }
         // Configure the swipe refresh layout
-        SwipeRefreshLayout swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipeRefreshLayout);
+        SwipeRefreshLayout swipeRefreshLayout = findViewById(R.id.swipeRefreshLayout);
         swipeRefreshLayout.setColorSchemeResources(android.R.color.holo_blue_bright,
                 android.R.color.holo_green_light,
                 android.R.color.holo_orange_light,
@@ -65,7 +65,7 @@ public class AuditLogActivity extends AppCompatActivity {
         });
         // Navigate to user profile on click
-        final ListView auditLogListView = (ListView) findViewById(R.id.auditLogListView);
+        final ListView auditLogListView = findViewById(R.id.auditLogListView);
         auditLogListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
             @Override
             public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
@@ -88,15 +88,15 @@ public class AuditLogActivity extends AppCompatActivity {
      * Refresh the view.
      */
     private void refreshView(String documentId) {
-        final SwipeRefreshLayout swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipeRefreshLayout);
-        final ProgressBar progressBar = (ProgressBar) findViewById(R.id.progressBar);
-        final ListView auditLogListView = (ListView) findViewById(R.id.auditLogListView);
+        final SwipeRefreshLayout swipeRefreshLayout = findViewById(R.id.swipeRefreshLayout);
+        final ProgressBar progressBar = findViewById(R.id.progressBar);
+        final ListView auditLogListView = findViewById(R.id.auditLogListView);
         progressBar.setVisibility(View.VISIBLE);
         auditLogListView.setVisibility(View.GONE);
         AuditLogResource.list(this, documentId, new HttpCallback() {
             @Override
             public void onSuccess(JSONObject response) {
-                auditLogListView.setAdapter(new AuditLogListAdapter(response.optJSONArray("logs")));
+                auditLogListView.setAdapter(new AuditLogListAdapter(AuditLogActivity.this, response.optJSONArray("logs")));
             }
             @Override


@@ -1,8 +1,6 @@
 package com.sismics.docs.adapter;
 import android.content.Context;
-import android.content.Intent;
-import android.text.TextUtils;
 import android.text.format.DateFormat;
 import android.view.LayoutInflater;
 import android.view.View;
@@ -30,12 +28,19 @@ public class AuditLogListAdapter extends BaseAdapter {
      */
     private List<JSONObject> logList;
+    /**
+     * Context.
+     */
+    private Context context;
     /**
      * Audit log list adapter.
      *
+     * @param context Context
      * @param logs Logs
      */
-    public AuditLogListAdapter(JSONArray logs) {
+    public AuditLogListAdapter(Context context, JSONArray logs) {
+        this.context = context;
         this.logList = new ArrayList<>();
         for (int i = 0; i < logs.length(); i++) {
@@ -67,11 +72,21 @@ public class AuditLogListAdapter extends BaseAdapter {
         // Build message
         final JSONObject log = getItem(position);
-        StringBuilder message = new StringBuilder(log.optString("class"));
+        StringBuilder message = new StringBuilder();
+        // Translate entity name
+        int stringId = context.getResources().getIdentifier("auditlog_" + log.optString("class"), "string", context.getPackageName());
+        if (stringId == 0) {
+            message.append(log.optString("class"));
+        } else {
+            message.append(context.getResources().getString(stringId));
+        }
+        message.append(" ");
         switch (log.optString("type")) {
-            case "CREATE": message.append(" created"); break;
-            case "UPDATE": message.append(" updated"); break;
-            case "DELETE": message.append(" deleted"); break;
+            case "CREATE": message.append(context.getResources().getString(R.string.auditlog_created)); break;
+            case "UPDATE": message.append(context.getResources().getString(R.string.auditlog_updated)); break;
+            case "DELETE": message.append(context.getResources().getString(R.string.auditlog_deleted)); break;
         }
         switch (log.optString("class")) {
             case "Document":
@@ -85,9 +100,9 @@ public class AuditLogListAdapter extends BaseAdapter {
         }
         // Fill the view
-        TextView usernameTextView = (TextView) view.findViewById(R.id.usernameTextView);
-        TextView messageTextView = (TextView) view.findViewById(R.id.messageTextView);
-        TextView dateTextView = (TextView) view.findViewById(R.id.dateTextView);
+        TextView usernameTextView = view.findViewById(R.id.usernameTextView);
+        TextView messageTextView = view.findViewById(R.id.messageTextView);
+        TextView dateTextView = view.findViewById(R.id.dateTextView);
         usernameTextView.setText(log.optString("username"));
         messageTextView.setText(message);
         String date = DateFormat.getDateFormat(parent.getContext()).format(new Date(log.optLong("create_date")));


@@ -33,6 +33,7 @@ public class LanguageAdapter extends BaseAdapter {
         }
         languageList.add(new Language("fra", R.string.language_french, R.drawable.fra));
         languageList.add(new Language("eng", R.string.language_english, R.drawable.eng));
+        languageList.add(new Language("deu", R.string.language_german, R.drawable.deu));
     }
     @Override


@@ -63,14 +63,13 @@ public class DocListFragment extends Fragment {
         recyclerView.setAdapter(adapter);
         recyclerView.setHasFixedSize(true);
         recyclerView.setLongClickable(true);
-        recyclerView.addItemDecoration(new DividerItemDecoration(getResources().getDrawable(R.drawable.abc_list_divider_mtrl_alpha)));
         // Configure the LayoutManager
         final LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
         recyclerView.setLayoutManager(layoutManager);
         // Configure the swipe refresh layout
-        swipeRefreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipeRefreshLayout);
+        swipeRefreshLayout = view.findViewById(R.id.swipeRefreshLayout);
         swipeRefreshLayout.setColorSchemeResources(android.R.color.holo_blue_bright,
                 android.R.color.holo_green_light,
                 android.R.color.holo_orange_light,
@@ -194,7 +193,7 @@ public class DocListFragment extends Fragment {
     private void loadDocuments(final View view, final boolean reset) {
         if (view == null) return;
         final View progressBar = view.findViewById(R.id.progressBar);
-        final TextView documentsEmptyView = (TextView) view.findViewById(R.id.documentsEmptyView);
+        final TextView documentsEmptyView = view.findViewById(R.id.documentsEmptyView);
         if (reset) {
             loading = true;


@@ -1,10 +1,12 @@
 package com.sismics.docs.service;
 import android.app.IntentService;
+import android.app.NotificationChannel;
 import android.app.NotificationManager;
 import android.app.PendingIntent;
 import android.content.Intent;
 import android.net.Uri;
+import android.os.Build;
 import android.os.PowerManager;
 import android.support.v4.app.NotificationCompat;
 import android.support.v4.app.NotificationCompat.Builder;
@@ -29,7 +31,8 @@ import okhttp3.internal.Util;
  * @author bgamard
  */
 public class FileUploadService extends IntentService {
-    private static final String TAG = "FileUploadService";
+    private static final String TAG = "sismicsdocs:fileupload";
+    private static final String CHANNEL_ID = "FileUploadService";
     private static final int UPLOAD_NOTIFICATION_ID = 1;
     private static final int UPLOAD_NOTIFICATION_ID_DONE = 2;
@@ -49,18 +52,30 @@ public class FileUploadService extends IntentService {
         super.onCreate();
         notificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
-        notification = new NotificationCompat.Builder(this);
+        initChannels();
+        notification = new NotificationCompat.Builder(this, CHANNEL_ID);
         PowerManager pm = (PowerManager) getSystemService(POWER_SERVICE);
         wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG);
     }
+    private void initChannels() {
+        if (Build.VERSION.SDK_INT < 26) {
+            return;
+        }
+        NotificationChannel channel = new NotificationChannel(CHANNEL_ID,
+                "File Upload", NotificationManager.IMPORTANCE_HIGH);
+        channel.setDescription("Used to show file upload progress");
+        notificationManager.createNotificationChannel(channel);
+    }
     @Override
     protected void onHandleIntent(Intent intent) {
         if (intent == null) {
             return;
         }
-        wakeLock.acquire();
+        wakeLock.acquire(60_000 * 30); // 30 minutes upload time maximum
         try {
             onStart();
             handleFileUpload(intent.getStringExtra(PARAM_DOCUMENT_ID), (Uri) intent.getParcelableExtra(PARAM_URI));
@@ -77,7 +92,7 @@ public class FileUploadService extends IntentService {
      *
      * @param documentId Document ID
      * @param uri Data URI
-     * @throws IOException
+     * @throws IOException e
      */
     private void handleFileUpload(final String documentId, final Uri uri) throws Exception {
         final InputStream is = getContentResolver().openInputStream(uri);


@@ -156,7 +156,7 @@ public class OkHttpUtil {
     public static OkHttpClient buildClient(final Context context) {
         // One-time header computation
         if (userAgent == null) {
-            userAgent = "Sismics Docs Android " + ApplicationUtil.getVersionName(context) + "/Android " + Build.VERSION.RELEASE + "/" + Build.MODEL;
+            userAgent = "Teedy Android " + ApplicationUtil.getVersionName(context) + "/Android " + Build.VERSION.RELEASE + "/" + Build.MODEL;
         }
         if (acceptLanguage == null) {


@@ -39,7 +39,9 @@ public class SearchQueryBuilder {
      */
     public SearchQueryBuilder simpleSearch(String simpleSearch) {
         if (isValid(simpleSearch)) {
-            query.append(SEARCH_SEPARATOR).append(simpleSearch);
+            query.append(SEARCH_SEPARATOR)
+                    .append("simple:")
+                    .append(simpleSearch);
         }
         return this;
     }

Binary file added (9.1 KiB, not shown).

Binary file added (20 KiB, not shown).


@@ -29,7 +29,7 @@
     android:layout_width="wrap_content"
     android:layout_height="wrap_content"
     android:fontFamily="sans-serif-light"
-    android:textColor="#212121"
+    android:textColor="?android:attr/textColorPrimary"
     android:text="Test"
     android:textSize="16sp"
     android:ellipsize="end"
@@ -46,7 +46,7 @@
     android:layout_width="wrap_content"
    android:layout_height="wrap_content"
     android:fontFamily="sans-serif-light"
-    android:textColor="#777777"
+    android:textColor="?android:attr/textColorPrimary"
     android:text="test2"
     android:textSize="16sp"
     android:maxLines="1"
@@ -69,7 +69,7 @@
     android:layout_alignParentEnd="true"
     android:layout_alignParentRight="true"
     android:layout_alignParentTop="true"
-    android:textColor="#777777"
+    android:textColor="?android:attr/textColorPrimary"
     android:fontFamily="sans-serif-light"/>
 </RelativeLayout>


@@ -9,7 +9,6 @@
 <android.support.design.widget.CoordinatorLayout
     xmlns:app="http://schemas.android.com/apk/res-auto"
     android:id="@+id/overview_coordinator_layout"
-    android:theme="@style/ThemeOverlay.AppCompat.Dark.ActionBar"
     android:layout_width="match_parent"
     android:layout_height="match_parent">
@@ -22,7 +21,7 @@
         android:id="@+id/toolbar"
         android:layout_width="match_parent"
         android:layout_height="?attr/actionBarSize"
-        app:popupTheme="@style/ThemeOverlay.AppCompat.Light"
+        app:popupTheme="@style/AppTheme"
         app:layout_scrollFlags="enterAlways|scroll|snap" />
     </android.support.design.widget.AppBarLayout>

Binary file changed (5.7 KiB before, 6.3 KiB after; not shown).

Binary file changed (11 KiB before, 9.7 KiB after; not shown).


@@ -0,0 +1,159 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Validation -->
<string name="validate_error_email">Ungültige E-Mail</string>
<string name="validate_error_length_min">Zu kurz (min. %d)</string>
<string name="validate_error_length_max">Zu lang (max. %d)</string>
<string name="validate_error_required">Erforderlich</string>
<string name="validate_error_alphanumeric">Nur Buchstaben und Zahlen</string>
<!-- App -->
<string name="drawer_open">Navigationsleiste öffnen</string>
<string name="drawer_close">Navigationsleiste schließen</string>
<string name="login_explain"><![CDATA[Um zu beginnen, müssen Sie Teedy Server herunterladen und installieren <a href="https://github.com/sismics/docs">github.com/sismics/docs</a>, sowie die Login-Daten unten eingeben]]></string>
<string name="server">Server</string>
<string name="username">Username</string>
<string name="password">Password</string>
<string name="login">Login</string>
<string name="ok">OK</string>
<string name="cancel">Abbrechen</string>
<string name="login_fail_title">Login gescheitert</string>
<string name="login_fail">Benutzername oder Passwort falsch</string>
<string name="network_error_title">Netzwerkfehler</string>
<string name="network_error">Netzwerkfehler, überprüfen Sie die Internetverbindung und die Server-URL</string>
<string name="invalid_url_title">Ungültige URL</string>
<string name="invalid_url">Bitte überprüfen Sie die Server-URL und versuchen Sie es erneut</string>
<string name="crash_toast_text">Ein Absturz ist aufgetreten, ein Bericht wurde gesendet, um dieses Problem zu beheben</string>
<string name="created_date">Erstellungsdatum</string>
<string name="download_file">Aktuelle Datei herunterladen</string>
<string name="download_document">Herunterladen</string>
<string name="action_search">Dokumente durchsuchen</string>
<string name="all_documents">Alle Dokumente</string>
<string name="shared_documents">Geteilte Dokumente</string>
<string name="all_tags">Alle Tags</string>
<string name="no_tags">Keine Tags</string>
<string name="error_loading_tags">Fehler beim Laden von Tags</string>
<string name="no_documents">Keine Dokumente</string>
<string name="error_loading_documents">Fehler beim Laden von Dokumenten</string>
<string name="no_files">Keine Dateien</string>
<string name="error_loading_files">Fehler beim Laden von Dateien</string>
<string name="new_document">Neues Dokument</string>
<string name="share">Teilen</string>
<string name="close">Schließen</string>
<string name="add">Hinzufügen</string>
<string name="add_share_hint">Freigabename (optional)</string>
<string name="document_not_shared">Dieses Dokument wird derzeit nicht freigegeben</string>
<string name="delete_share">Diese Freigabe löschen</string>
<string name="send_share">Send this share link</string>
<string name="error_loading_shares">Fehler beim Laden von Freigaben</string>
<string name="error_adding_share">Fehler beim Hinzufügen der Freigabe</string>
<string name="share_default_name">Freigabe Link</string>
<string name="error_deleting_share">Fehler beim Löschen der Freigabe</string>
<string name="send_share_to">Freigabe senden an</string>
<string name="upload_file">Datei hinzufügen</string>
<string name="upload_from">Datei hochladen von</string>
<string name="settings">Einstellungen</string>
<string name="logout">Ausloggen</string>
<string name="version">Version</string>
<string name="build">Build</string>
<string name="pref_advanced_category">Erweiterte Einstellungen</string>
<string name="pref_about_category">Über</string>
<string name="pref_github">GitHub</string>
<string name="pref_issue">Fehler berichten</string>
<string name="pref_clear_cache_title">Cache leeren</string>
<string name="pref_clear_cache_summary">Zwischengespeicherte Dateien löschen</string>
<string name="pref_clear_cache_success">Cache wurde geleert</string>
<string name="pref_clear_history_title">Suchhistorie löschen</string>
<string name="pref_clear_history_summary">Leert die aktuellen Suchvorschläge</string>
<string name="pref_clear_history_success">Suchvorschläge wurden gelöscht</string>
<string name="pref_cache_size">Cache Größe</string>
<string name="save">Speichern</string>
<string name="edit_document">Bearbeiten</string>
<string name="error_editing_document">Netzwerkfehler, bitte versuchen Sie es erneut</string>
<string name="please_wait">Bitte warten</string>
<string name="document_editing_message">Daten werden gesendet</string>
<string name="delete_document">Löschen</string>
<string name="delete_document_title">Dokument löschen</string>
<string name="delete_document_message">Dieses Dokument und alle zugehörigen Dateien wirklich löschen?</string>
<string name="document_delete_failure">Netzwerkfehler beim Löschen des Dokuments</string>
<string name="document_deleting_message">Lösche Dokument</string>
<string name="delete_file_title">Datei löschen</string>
<string name="delete_file_message">Die aktuelle Datei wirklich löschen?</string>
<string name="file_delete_failure">Netzwerkfehler beim Löschen der Datei</string>
<string name="file_deleting_message">Lösche Datei</string>
<string name="error_reading_file">Fehler beim Lesen der Datei</string>
<string name="upload_notification_title">Teedy</string>
<string name="upload_notification_message">Neue Datei in das Dokument hochladen</string>
<string name="upload_notification_error">Fehler beim Hochladen der neuen Datei</string>
<string name="delete_file">Aktuelle Datei löschen</string>
<string name="advanced_search">Erweiterte Suche</string>
<string name="search">Suche</string>
<string name="add_tags">Tags hinzufügen</string>
<string name="creation_date">Erstellungsdatum</string>
<string name="description">Beschreibung</string>
<string name="title">Titel</string>
<string name="simple_search">Einfache Suche</string>
<string name="fulltext_search">Volltextsuche</string>
<string name="creator">Ersteller</string>
<string name="after_date">Nach Datum</string>
<string name="before_date">Vor Datum</string>
<string name="search_tags">Tags durchsuchen</string>
<string name="all_languages">Alle Sprachen</string>
<string name="toggle_informations">Informationen anzeigen</string>
<string name="who_can_access">Wer kann darauf zugreifen?</string>
<string name="comments">Kommentare</string>
<string name="no_comments">Keine Kommentare</string>
<string name="error_loading_comments">Fehler beim Laden von Kommentaren</string>
<string name="send">Senden</string>
<string name="add_comment">Kommentar hinzufügen</string>
<string name="comment_add_failure">Fehler beim Hinzufügen des Kommentars</string>
<string name="adding_comment">Füge Kommentar hinzu</string>
<string name="comment_delete">Kommentar löschen</string>
<string name="deleting_comment">Lösche Kommentar</string>
<string name="error_deleting_comment">Fehler beim Löschen des Kommentars</string>
<string name="export_pdf">PDF</string>
<string name="download">Download</string>
<string name="margin">Rand</string>
<string name="fit_image_to_page">Bild an Seite anpassen</string>
<string name="export_comments">Kommentare exportieren</string>
<string name="export_metadata">Metadaten exportieren</string>
<string name="mm">mm</string>
<string name="download_file_title">Teedy Datei Export</string>
<string name="download_document_title">Teedy Dokumentenexport</string>
<string name="download_pdf_title">Teedy PDF Export</string>
<string name="latest_activity">Letzte Aktivität</string>
<string name="activity">Aktivitäten</string>
<string name="email">E-Mail</string>
<string name="storage_quota">Speicherbegrenzung</string>
<string name="storage_display">%1$d/%2$d MB</string>
<string name="validation_code">Validierungscode</string>
<string name="shared">Geteilt</string>
<string name="language">Sprache</string>
<string name="coverage">Geltungsbereich</string>
<string name="type">Typ</string>
<string name="source">Quelle</string>
<string name="format">Format</string>
<string name="publisher">Verleger</string>
<string name="identifier">Identifikator</string>
<string name="subject">Thema</string>
<string name="rights">Rechte</string>
<string name="contributors">Mitwirkende</string>
<string name="relations">Beziehungen</string>
<!-- Audit log -->
<string name="auditlog_Acl">ACL</string>
<string name="auditlog_Comment">Kommentar</string>
<string name="auditlog_Document">Dokument</string>
<string name="auditlog_File">Datei</string>
<string name="auditlog_Group">Gruppe</string>
<string name="auditlog_Route">Workflow</string>
<string name="auditlog_RouteModel">Workflow-Muster</string>
<string name="auditlog_Tag">Tag</string>
<string name="auditlog_User">Benutzer</string>
<string name="auditlog_Webhook">Webhook</string>
<string name="auditlog_created">erstellt</string>
<string name="auditlog_updated">aktualisiert</string>
<string name="auditlog_deleted">gelöscht</string>
</resources>


@@ -11,7 +11,7 @@
 <!-- App -->
 <string name="drawer_open">Ouvrir le menu de navigation</string>
 <string name="drawer_close">Fermer le menu de navigation</string>
-<string name="login_explain"><![CDATA[Pour commencer, vous devez télécharger et installer le serveur Sismics Docs sur <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> et entrer son URL ci-dessous]]></string>
+<string name="login_explain"><![CDATA[Pour commencer, vous devez télécharger et installer le serveur Teedy sur <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> et entrer son URL ci-dessous]]></string>
 <string name="server">Serveur</string>
 <string name="username">Nom d\'utilisateur</string>
 <string name="password">Mot de passe</string>
@@ -83,7 +83,7 @@
 <string name="file_delete_failure">Erreur réseau lors de la suppression du fichier</string>
 <string name="file_deleting_message">Suppression du fichier</string>
 <string name="error_reading_file">Erreur lors de la lecture du fichier</string>
-<string name="upload_notification_title">Sismics Docs</string>
+<string name="upload_notification_title">Teedy</string>
 <string name="upload_notification_message">Envoi du nouveau fichier</string>
 <string name="upload_notification_error">Erreur lors de l\'envoi du nouveau fichier</string>
 <string name="delete_file">Supprimer ce fichier</string>
@@ -119,9 +119,9 @@
 <string name="export_comments">Exporter les commentaires</string>
 <string name="export_metadata">Exporter les métadonnées</string>
 <string name="mm">mm</string>
-<string name="download_file_title">Export de fichier Sismics Docs</string>
-<string name="download_document_title">Export de document Sismics Docs</string>
-<string name="download_pdf_title">Export PDF Sismics Docs</string>
+<string name="download_file_title">Export de fichier Teedy</string>
+<string name="download_document_title">Export de document Teedy</string>
+<string name="download_pdf_title">Export PDF Teedy</string>
 <string name="latest_activity">Activité récente</string>
 <string name="activity">Activité</string>
 <string name="email">E-mail</string>
@@ -141,4 +141,19 @@
 <string name="contributors">Contributeurs</string>
 <string name="relations">Relations</string>
+<!-- Audit log -->
+<string name="auditlog_Acl">ACL</string>
+<string name="auditlog_Comment">Commentaire</string>
+<string name="auditlog_Document">Document</string>
+<string name="auditlog_File">Fichier</string>
+<string name="auditlog_Group">Groupe</string>
+<string name="auditlog_Route">Workflow</string>
+<string name="auditlog_RouteModel">Modèle de workflow</string>
+<string name="auditlog_Tag">Tag</string>
+<string name="auditlog_User">Utilisateur</string>
+<string name="auditlog_Webhook">Webhook</string>
+<string name="auditlog_created">créé</string>
+<string name="auditlog_updated">mis à jour</string>
+<string name="auditlog_deleted">supprimé</string>
 </resources>


@@ -9,10 +9,10 @@
 <string name="validate_error_alphanumeric">Only letters and numbers</string>
 <!-- App -->
-<string name="app_name" translatable="false">Sismics Docs</string>
+<string name="app_name" translatable="false">Teedy</string>
 <string name="drawer_open">Open navigation drawer</string>
 <string name="drawer_close">Close navigation drawer</string>
-<string name="login_explain"><![CDATA[To start, you must download and install Sismics Docs Server on <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> and enter its below]]></string>
+<string name="login_explain"><![CDATA[To start, you must download and install Teedy Server on <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> and enter its below]]></string>
 <string name="server">Server</string>
 <string name="username">Username</string>
 <string name="password">Password</string>
@@ -71,6 +71,7 @@
 <string name="pref_cache_size">Cache size</string>
 <string name="language_french" translatable="false">Français</string>
 <string name="language_english" translatable="false">English</string>
+<string name="language_german" translatable="false">Deutsch</string>
 <string name="save">Save</string>
 <string name="edit_document">Edit</string>
 <string name="error_editing_document">Network error, please try again</string>
@@ -86,7 +87,7 @@
 <string name="file_delete_failure">Network error while deleting the current file</string>
 <string name="file_deleting_message">Deleting file</string>
 <string name="error_reading_file">Error while reading the file</string>
-<string name="upload_notification_title">Sismics Docs</string>
+<string name="upload_notification_title">Teedy</string>
 <string name="upload_notification_message">Uploading the new file to the document</string>
 <string name="upload_notification_error">Error uploading the new file</string>
 <string name="delete_file">Delete current file</string>
@@ -122,9 +123,9 @@
 <string name="export_comments">Export comments</string>
 <string name="export_metadata">Export metadata</string>
 <string name="mm">mm</string>
-<string name="download_file_title">Sismics Docs file export</string>
-<string name="download_document_title">Sismics Docs document export</string>
-<string name="download_pdf_title">Sismics Docs PDF export</string>
+<string name="download_file_title">Teedy file export</string>
+<string name="download_document_title">Teedy document export</string>
+<string name="download_pdf_title">Teedy PDF export</string>
 <string name="latest_activity">Latest activity</string>
 <string name="activity">Activity</string>
 <string name="email">E-mail</string>
@@ -144,4 +145,19 @@
 <string name="contributors">Contributors</string>
 <string name="relations">Relations</string>
+<!-- Audit log -->
+<string name="auditlog_Acl">ACL</string>
+<string name="auditlog_Comment">Comment</string>
+<string name="auditlog_Document">Document</string>
+<string name="auditlog_File">File</string>
+<string name="auditlog_Group">Group</string>
+<string name="auditlog_Route">Workflow</string>
+<string name="auditlog_RouteModel">Workflow model</string>
+<string name="auditlog_Tag">Tag</string>
+<string name="auditlog_User">User</string>
+<string name="auditlog_Webhook">Webhook</string>
+<string name="auditlog_created">created</string>
+<string name="auditlog_updated">updated</string>
+<string name="auditlog_deleted">deleted</string>
 </resources>


@@ -1,12 +1,12 @@
 <resources>
-    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+    <style name="AppTheme" parent="Theme.AppCompat.DayNight">
         <item name="colorPrimary">@color/colorPrimary</item>
         <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
         <item name="colorAccent">@color/colorAccent</item>
     </style>
-    <style name="AppTheme.NoActionBar" parent="Theme.AppCompat.Light.DarkActionBar">
+    <style name="AppTheme.NoActionBar" parent="Theme.AppCompat.DayNight.NoActionBar">
         <item name="windowActionBar">false</item>
         <item name="windowNoTitle">true</item>
         <item name="colorPrimary">@color/colorPrimary</item>
@@ -14,7 +14,7 @@
         <item name="colorAccent">@color/colorAccent</item>
     </style>
-    <style name="AppThemeDark" parent="Theme.AppCompat.NoActionBar">
+    <style name="AppThemeDark" parent="Theme.AppCompat.DayNight.NoActionBar">
         <item name="colorPrimary">@color/colorPrimary</item>
         <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
         <item name="colorAccent">@color/colorAccent</item>


@@ -1,6 +1,6 @@
-#Thu Oct 18 22:37:49 CEST 2018
+#Tue May 07 11:49:13 CEST 2019
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-4.6-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip


@@ -5,7 +5,7 @@
     <parent>
         <groupId>com.sismics.docs</groupId>
         <artifactId>docs-parent</artifactId>
-        <version>1.6-SNAPSHOT</version>
+        <version>1.9-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>
@@ -132,6 +132,11 @@
         <artifactId>okhttp</artifactId>
     </dependency>
+    <dependency>
+        <groupId>org.apache.directory.api</groupId>
+        <artifactId>api-all</artifactId>
+    </dependency>
     <!-- Only there to read old index and rebuild them -->
     <dependency>
         <groupId>org.apache.lucene</groupId>
@@ -190,6 +195,25 @@
         <artifactId>postgresql</artifactId>
     </dependency>
+    <!-- JDK 11 JAXB dependencies -->
+    <dependency>
+        <groupId>javax.xml.bind</groupId>
+        <artifactId>jaxb-api</artifactId>
+        <version>2.3.0</version>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.xml.bind</groupId>
+        <artifactId>jaxb-core</artifactId>
+        <version>2.3.0</version>
+    </dependency>
+    <dependency>
+        <groupId>com.sun.xml.bind</groupId>
+        <artifactId>jaxb-impl</artifactId>
+        <version>2.3.0</version>
+    </dependency>
     <!-- Test dependencies -->
     <dependency>
         <groupId>junit</groupId>


@@ -42,5 +42,20 @@ public enum ConfigType {
     INBOX_PORT,
     INBOX_USERNAME,
     INBOX_PASSWORD,
-    INBOX_TAG
+    INBOX_TAG,
+    INBOX_AUTOMATIC_TAGS,
+    INBOX_DELETE_IMPORTED,
+    /**
+     * LDAP connection.
+     */
+    LDAP_ENABLED,
+    LDAP_HOST,
+    LDAP_PORT,
+    LDAP_ADMIN_DN,
+    LDAP_ADMIN_PASSWORD,
+    LDAP_BASE_DN,
+    LDAP_FILTER,
+    LDAP_DEFAULT_EMAIL,
+    LDAP_DEFAULT_STORAGE
 }


@@ -38,7 +38,7 @@ public class Constants {
     /**
      * Supported document languages.
      */
-    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor");
+    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan");
     /**
      * Base URL environment variable.


@@ -0,0 +1,14 @@
package com.sismics.docs.core.constant;
/**
* Metadata type.
*
* @author bgamard
*/
public enum MetadataType {
STRING,
INTEGER,
FLOAT,
DATE,
BOOLEAN
}


@@ -27,7 +27,6 @@ public class AuditLogDao {
      *
      * @param auditLog Audit log
      * @return New ID
-     * @throws Exception
      */
     public String create(AuditLog auditLog) {
         // Create the UUID
@@ -47,10 +46,9 @@ public class AuditLogDao {
      * @param paginatedList List of audit logs (updated by side effects)
      * @param criteria Search criteria
      * @param sortCriteria Sort criteria
-     * @return List of audit logs
      */
     public void findByCriteria(PaginatedList<AuditLogDto> paginatedList, AuditLogCriteria criteria, SortCriteria sortCriteria) {
-        Map<String, Object> parameterMap = new HashMap<String, Object>();
+        Map<String, Object> parameterMap = new HashMap<>();
         StringBuilder baseQuery = new StringBuilder("select l.LOG_ID_C c0, l.LOG_CREATEDATE_D c1, u.USE_USERNAME_C c2, l.LOG_IDENTITY_C c3, l.LOG_CLASSENTITY_C c4, l.LOG_TYPE_C c5, l.LOG_MESSAGE_C c6 from T_AUDIT_LOG l ");
         baseQuery.append(" join T_USER u on l.LOG_IDUSER_C = u.USE_ID_C ");
@@ -63,22 +61,28 @@ public class AuditLogDao {
             queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) ");
             queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select c.COM_ID_C from T_COMMENT c where c.COM_IDDOC_C = :documentId) ");
             queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select a.ACL_ID_C from T_ACL a where a.ACL_SOURCEID_C = :documentId) ");
+            queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select r.RTE_ID_C from T_ROUTE r where r.RTE_IDDOCUMENT_C = :documentId) ");
             parameterMap.put("documentId", criteria.getDocumentId());
         }
         if (criteria.getUserId() != null) {
+            if (criteria.isAdmin()) {
+                // For admin users, display all logs except ACL logs
+                queries.add(baseQuery + " where l.LOG_CLASSENTITY_C != 'Acl' ");
+            } else {
                 // Get all logs originating from the user, not necessarly on owned items
                 // Filter out ACL logs
                 queries.add(baseQuery + " where l.LOG_IDUSER_C = :userId and l.LOG_CLASSENTITY_C != 'Acl' ");
                 parameterMap.put("userId", criteria.getUserId());
+            }
         }
         // Perform the search
         QueryParam queryParam = new QueryParam(Joiner.on(" union ").join(queries), parameterMap);
         List<Object[]> l = PaginatedLists.executePaginatedQuery(paginatedList, queryParam, sortCriteria);
         // Assemble results
-        List<AuditLogDto> auditLogDtoList = new ArrayList<AuditLogDto>();
+        List<AuditLogDto> auditLogDtoList = new ArrayList<>();
         for (Object[] o : l) {
             int i = 0;
             AuditLogDto auditLogDto = new AuditLogDto();


@@ -27,7 +27,6 @@ public class CommentDao {
      * @param comment Comment
      * @param userId User ID
      * @return New ID
-     * @throws Exception
      */
     public String create(Comment comment, String userId) {
         // Create the UUID
@@ -99,7 +98,7 @@ public class CommentDao {
         @SuppressWarnings("unchecked")
         List<Object[]> l = q.getResultList();
-        List<CommentDto> commentDtoList = new ArrayList<CommentDto>();
+        List<CommentDto> commentDtoList = new ArrayList<>();
         for (Object[] o : l) {
             int i = 0;
             CommentDto commentDto = new CommentDto();
@@ -107,7 +106,7 @@ public class CommentDao {
             commentDto.setContent((String) o[i++]);
             commentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
             commentDto.setCreatorName((String) o[i++]);
-            commentDto.setCreatorEmail((String) o[i++]);
+            commentDto.setCreatorEmail((String) o[i]);
             commentDtoList.add(commentDto);
         }
         return commentDtoList;

View File

@ -56,7 +56,7 @@ public class ContributorDao {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public List<ContributorDto> getByDocumentId(String documentId) { public List<ContributorDto> getByDocumentId(String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c "); StringBuilder sb = new StringBuilder("select distinct u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
sb.append(" join T_USER u on u.USE_ID_C = c.CTR_IDUSER_C "); sb.append(" join T_USER u on u.USE_ID_C = c.CTR_IDUSER_C ");
sb.append(" where c.CTR_IDDOC_C = :documentId "); sb.append(" where c.CTR_IDDOC_C = :documentId ");
Query q = em.createNativeQuery(sb.toString()); Query q = em.createNativeQuery(sb.toString());

View File

@ -46,12 +46,16 @@ public class DocumentDao {
/** /**
* Returns the list of all active documents. * Returns the list of all active documents.
* *
* @param offset Offset
* @param limit Limit
* @return List of documents * @return List of documents
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public List<Document> findAll() { public List<Document> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.deleteDate is null"); Query q = em.createQuery("select d from Document d where d.deleteDate is null");
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList(); return q.getResultList();
} }
@ -185,7 +189,7 @@ public class DocumentDao {
} }
/** /**
* Update a document. * Update a document and log the action.
* *
* @param document Document to update * @param document Document to update
* @param userId User ID * @param userId User ID
@ -212,6 +216,7 @@ public class DocumentDao {
documentDb.setRights(document.getRights()); documentDb.setRights(document.getRights());
documentDb.setCreateDate(document.getCreateDate()); documentDb.setCreateDate(document.getCreateDate());
documentDb.setLanguage(document.getLanguage()); documentDb.setLanguage(document.getLanguage());
documentDb.setFileId(document.getFileId());
documentDb.setUpdateDate(new Date()); documentDb.setUpdateDate(new Date());
// Create audit log // Create audit log
@ -220,6 +225,21 @@ public class DocumentDao {
return documentDb; return documentDb;
} }
/**
* Update the file ID on a document.
*
* @param document Document
*/
public void updateFileId(Document document) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query query = em.createNativeQuery("update T_DOCUMENT d set DOC_IDFILE_C = :fileId, DOC_UPDATEDATE_D = :updateDate where d.DOC_ID_C = :id");
query.setParameter("updateDate", new Date());
query.setParameter("fileId", document.getFileId());
query.setParameter("id", document.getId());
query.executeUpdate();
}
/** /**
* Returns the number of documents. * Returns the number of documents.
* *

View File

@ -0,0 +1,89 @@
package com.sismics.docs.core.dao;
import com.sismics.docs.core.constant.MetadataType;
import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* Document metadata DAO.
*
* @author bgamard
*/
public class DocumentMetadataDao {
/**
* Creates a new document metadata.
*
* @param documentMetadata Document metadata
* @return New ID
*/
public String create(DocumentMetadata documentMetadata) {
// Create the UUID
documentMetadata.setId(UUID.randomUUID().toString());
// Create the document metadata
EntityManager em = ThreadLocalContext.get().getEntityManager();
em.persist(documentMetadata);
return documentMetadata.getId();
}
/**
* Updates a document metadata.
*
* @param documentMetadata Document metadata
* @return Updated document metadata
*/
public DocumentMetadata update(DocumentMetadata documentMetadata) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document metadata
Query q = em.createQuery("select u from DocumentMetadata u where u.id = :id");
q.setParameter("id", documentMetadata.getId());
DocumentMetadata documentMetadataDb = (DocumentMetadata) q.getSingleResult();
// Update the document metadata
documentMetadataDb.setValue(documentMetadata.getValue());
return documentMetadata;
}
/**
* Returns the list of all metadata values on a document.
*
* @param documentId Document ID
* @return List of metadata
*/
@SuppressWarnings("unchecked")
public List<DocumentMetadataDto> getByDocumentId(String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select dm.DME_ID_C, dm.DME_IDDOCUMENT_C, dm.DME_IDMETADATA_C, dm.DME_VALUE_C, m.MET_TYPE_C");
sb.append(" from T_DOCUMENT_METADATA dm, T_METADATA m ");
sb.append(" where dm.DME_IDMETADATA_C = m.MET_ID_C and dm.DME_IDDOCUMENT_C = :documentId and m.MET_DELETEDATE_D is null");
// Perform the search
Query q = em.createNativeQuery(sb.toString());
q.setParameter("documentId", documentId);
List<Object[]> l = q.getResultList();
// Assemble results
List<DocumentMetadataDto> dtoList = new ArrayList<>();
for (Object[] o : l) {
int i = 0;
DocumentMetadataDto dto = new DocumentMetadataDto();
dto.setId((String) o[i++]);
dto.setDocumentId((String) o[i++]);
dto.setMetadataId((String) o[i++]);
dto.setValue((String) o[i++]);
dto.setType(MetadataType.valueOf((String) o[i]));
dtoList.add(dto);
}
return dtoList;
}
}
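A quick usage sketch of this new DAO, assuming it runs inside a transaction managed through ThreadLocalContext like the rest of the codebase; the document and metadata IDs below are placeholders:
DocumentMetadata documentMetadata = new DocumentMetadata();
documentMetadata.setDocumentId("document-id");   // placeholder ID
documentMetadata.setMetadataId("metadata-id");   // placeholder ID
documentMetadata.setValue("2020-09-24");
new DocumentMetadataDao().create(documentMetadata);
// Read back every metadata value attached to the document
for (DocumentMetadataDto dto : new DocumentMetadataDao().getByDocumentId("document-id")) {
    System.out.println(dto.getMetadataId() + " = " + dto.getValue());
}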

View File

@ -43,12 +43,16 @@ public class FileDao {
/** /**
* Returns the list of all files. * Returns the list of all files.
* *
* @param offset Offset
* @param limit Limit
* @return List of files * @return List of files
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public List<File> findAll() { public List<File> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.deleteDate is null"); Query q = em.createQuery("select f from File f where f.deleteDate is null");
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList(); return q.getResultList();
} }
@ -144,6 +148,8 @@ public class FileDao {
fileDb.setContent(file.getContent()); fileDb.setContent(file.getContent());
fileDb.setOrder(file.getOrder()); fileDb.setOrder(file.getOrder());
fileDb.setMimeType(file.getMimeType()); fileDb.setMimeType(file.getMimeType());
fileDb.setVersionId(file.getVersionId());
fileDb.setLatestVersion(file.isLatestVersion());
return file; return file;
} }
@ -176,12 +182,26 @@ public class FileDao {
public List<File> getByDocumentId(String userId, String documentId) { public List<File> getByDocumentId(String userId, String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
if (documentId == null) { if (documentId == null) {
Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.userId = :userId order by f.createDate asc"); Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
q.setParameter("userId", userId); q.setParameter("userId", userId);
return q.getResultList(); return q.getResultList();
} }
Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.deleteDate is null order by f.order asc"); Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
q.setParameter("documentId", documentId); q.setParameter("documentId", documentId);
return q.getResultList(); return q.getResultList();
} }
/**
* Get all files from a version.
*
* @param versionId Version ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> getByVersionId(String versionId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
q.setParameter("versionId", versionId);
return q.getResultList();
}
} }

View File

@ -0,0 +1,148 @@
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.constant.MetadataType;
import com.sismics.docs.core.dao.criteria.MetadataCriteria;
import com.sismics.docs.core.dao.dto.MetadataDto;
import com.sismics.docs.core.model.jpa.Metadata;
import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import java.util.*;
/**
* Metadata DAO.
*
* @author bgamard
*/
public class MetadataDao {
/**
* Creates a new metadata.
*
* @param metadata Metadata
* @param userId User ID
* @return New ID
*/
public String create(Metadata metadata, String userId) {
// Create the UUID
metadata.setId(UUID.randomUUID().toString());
// Create the metadata
EntityManager em = ThreadLocalContext.get().getEntityManager();
em.persist(metadata);
// Create audit log
AuditLogUtil.create(metadata, AuditLogType.CREATE, userId);
return metadata.getId();
}
/**
* Update a metadata.
*
* @param metadata Metadata to update
* @param userId User ID
* @return Updated metadata
*/
public Metadata update(Metadata metadata, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the metadata
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
q.setParameter("id", metadata.getId());
Metadata metadataDb = (Metadata) q.getSingleResult();
// Update the metadata
metadataDb.setName(metadata.getName());
// Create audit log
AuditLogUtil.create(metadataDb, AuditLogType.UPDATE, userId);
return metadataDb;
}
/**
* Gets an active metadata by its ID.
*
* @param id Metadata ID
* @return Metadata
*/
public Metadata getActiveById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
try {
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
q.setParameter("id", id);
return (Metadata) q.getSingleResult();
} catch (NoResultException e) {
return null;
}
}
/**
* Deletes a metadata.
*
* @param id Metadata ID
* @param userId User ID
*/
public void delete(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the metadata
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
q.setParameter("id", id);
Metadata metadataDb = (Metadata) q.getSingleResult();
// Delete the metadata
Date dateNow = new Date();
metadataDb.setDeleteDate(dateNow);
// Create audit log
AuditLogUtil.create(metadataDb, AuditLogType.DELETE, userId);
}
/**
* Returns the list of all metadata.
*
* @param criteria Search criteria
* @param sortCriteria Sort criteria
* @return List of metadata
*/
public List<MetadataDto> findByCriteria(MetadataCriteria criteria, SortCriteria sortCriteria) {
Map<String, Object> parameterMap = new HashMap<>();
List<String> criteriaList = new ArrayList<>();
StringBuilder sb = new StringBuilder("select m.MET_ID_C c0, m.MET_NAME_C c1, m.MET_TYPE_C c2");
sb.append(" from T_METADATA m ");
criteriaList.add("m.MET_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@SuppressWarnings("unchecked")
List<Object[]> l = QueryUtil.getNativeQuery(queryParam).getResultList();
// Assemble results
List<MetadataDto> dtoList = new ArrayList<>();
for (Object[] o : l) {
int i = 0;
MetadataDto dto = new MetadataDto();
dto.setId((String) o[i++]);
dto.setName((String) o[i++]);
dto.setType(MetadataType.valueOf((String) o[i]));
dtoList.add(dto);
}
return dtoList;
}
}
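Listing the defined metadata through this DAO would then look roughly as follows; the sort column index and direction are assumptions, mirroring how SortCriteria is used elsewhere in this diff:
List<MetadataDto> metadataList = new MetadataDao()
        .findByCriteria(new MetadataCriteria(), new SortCriteria(1, true));
for (MetadataDto metadataDto : metadataList) {
    System.out.println(metadataDto.getName() + " (" + metadataDto.getType() + ")");
}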

View File

@ -36,13 +36,13 @@ public class RelationDao {
List<Object[]> l = q.getResultList(); List<Object[]> l = q.getResultList();
// Assemble results // Assemble results
List<RelationDto> relationDtoList = new ArrayList<RelationDto>(); List<RelationDto> relationDtoList = new ArrayList<>();
for (Object[] o : l) { for (Object[] o : l) {
int i = 0; int i = 0;
RelationDto relationDto = new RelationDto(); RelationDto relationDto = new RelationDto();
relationDto.setId((String) o[i++]); relationDto.setId((String) o[i++]);
relationDto.setTitle((String) o[i++]); relationDto.setTitle((String) o[i++]);
String fromDocId = (String) o[i++]; String fromDocId = (String) o[i];
relationDto.setSource(documentId.equals(fromDocId)); relationDto.setSource(documentId.equals(fromDocId));
relationDtoList.add(relationDto); relationDtoList.add(relationDto);
} }

View File

@ -91,10 +91,15 @@ public class RouteDao {
* Deletes a route and the associated steps. * Deletes a route and the associated steps.
* *
* @param routeId Route ID * @param routeId Route ID
* @param userId User ID
*/ */
public void deleteRoute(String routeId) { public void deleteRoute(String routeId, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
// Create audit log
Route route = em.find(Route.class, routeId);
AuditLogUtil.create(route, AuditLogType.DELETE, userId);
em.createNativeQuery("update T_ROUTE_STEP rs set RTP_DELETEDATE_D = :dateNow where rs.RTP_IDROUTE_C = :routeId and rs.RTP_DELETEDATE_D is null") em.createNativeQuery("update T_ROUTE_STEP rs set RTP_DELETEDATE_D = :dateNow where rs.RTP_IDROUTE_C = :routeId and rs.RTP_DELETEDATE_D is null")
.setParameter("routeId", routeId) .setParameter("routeId", routeId)
.setParameter("dateNow", new Date()) .setParameter("dateNow", new Date())

View File

@ -6,6 +6,7 @@ import com.sismics.docs.core.dao.criteria.RouteModelCriteria;
import com.sismics.docs.core.dao.dto.RouteModelDto; import com.sismics.docs.core.dao.dto.RouteModelDto;
import com.sismics.docs.core.model.jpa.RouteModel; import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.docs.core.util.AuditLogUtil; import com.sismics.docs.core.util.AuditLogUtil;
import com.sismics.docs.core.util.SecurityUtil;
import com.sismics.docs.core.util.jpa.QueryParam; import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.QueryUtil; import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria; import com.sismics.docs.core.util.jpa.SortCriteria;
@ -60,7 +61,7 @@ public class RouteModelDao {
q.setParameter("id", routeModel.getId()); q.setParameter("id", routeModel.getId());
RouteModel routeModelDb = (RouteModel) q.getSingleResult(); RouteModel routeModelDb = (RouteModel) q.getSingleResult();
// Update the group // Update the route model
routeModelDb.setName(routeModel.getName()); routeModelDb.setName(routeModel.getName());
routeModelDb.setSteps(routeModel.getSteps()); routeModelDb.setSteps(routeModel.getSteps());
@ -87,6 +88,18 @@ public class RouteModelDao {
} }
} }
/**
* Returns the list of all route models.
*
* @return List of route models
*/
@SuppressWarnings("unchecked")
public List<RouteModel> findAll() {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select r from RouteModel r where r.deleteDate is null");
return q.getResultList();
}
/** /**
* Deletes a route model. * Deletes a route model.
* *
@ -124,6 +137,12 @@ public class RouteModelDao {
sb.append(" from T_ROUTE_MODEL rm "); sb.append(" from T_ROUTE_MODEL rm ");
// Add search criteria // Add search criteria
if (criteria.getTargetIdList() != null && !SecurityUtil.skipAclCheck(criteria.getTargetIdList())) {
sb.append(" left join T_ACL a on a.ACL_TARGETID_C in (:targetIdList) and a.ACL_SOURCEID_C = rm.RTM_ID_C and a.ACL_PERM_C = 'READ' and a.ACL_DELETEDATE_D is null ");
criteriaList.add("a.ACL_ID_C is not null");
parameterMap.put("targetIdList", criteria.getTargetIdList());
}
criteriaList.add("rm.RTM_DELETEDATE_D is null"); criteriaList.add("rm.RTM_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { if (!criteriaList.isEmpty()) {

View File

@ -171,6 +171,26 @@ public class UserDao {
return user; return user;
} }
/**
* Update the onboarding status.
*
* @param user User to update
* @return Updated user
*/
public User updateOnboarding(User user) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the user
Query q = em.createQuery("select u from User u where u.id = :id and u.deleteDate is null");
q.setParameter("id", user.getId());
User userDb = (User) q.getSingleResult();
// Update the user
userDb.setOnboarding(user.isOnboarding());
return user;
}
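Dismissing the onboarding for the current user could then be done like this (a sketch; where userId comes from and the resource layer calling it are not part of this excerpt):
UserDao userDao = new UserDao();
User user = userDao.getById(userId);   // userId assumed to come from the authenticated principal
userDao.updateOnboarding(user.setOnboarding(false));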
/** /**
* Gets a user by its ID. * Gets a user by its ID.
* *

View File

@ -17,6 +17,11 @@ public class AuditLogCriteria {
*/ */
private String userId; private String userId;
/**
* The search is done for an admin user.
*/
private boolean isAdmin = false;
public String getDocumentId() { public String getDocumentId() {
return documentId; return documentId;
} }
@ -32,4 +37,13 @@ public class AuditLogCriteria {
public void setUserId(String userId) { public void setUserId(String userId) {
this.userId = userId; this.userId = userId;
} }
public boolean isAdmin() {
return isAdmin;
}
public AuditLogCriteria setAdmin(boolean admin) {
isAdmin = admin;
return this;
}
} }
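Tying this back to the AuditLogDao change above, an admin-wide log search could be built like this (a sketch; how the caller decides that the request comes from an admin is outside this excerpt):
AuditLogCriteria criteria = new AuditLogCriteria();
criteria.setUserId(userId);   // still required, the non-admin branch filters on it
criteria.setAdmin(true);      // admin: every log except ACL entries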

View File

@ -51,6 +51,12 @@ public class DocumentCriteria {
*/ */
private List<List<String>> tagIdList; private List<List<String>> tagIdList;
/**
* Tag IDs to exclude.
* The first and second level lists will be excluded.
*/
private List<List<String>> excludedTagIdList;
/** /**
* Shared status. * Shared status.
*/ */
@ -71,6 +77,11 @@ public class DocumentCriteria {
*/ */
private Boolean activeRoute; private Boolean activeRoute;
/**
* MIME type of a file.
*/
private String mimeType;
public List<String> getTargetIdList() { public List<String> getTargetIdList() {
return targetIdList; return targetIdList;
} }
@ -119,6 +130,15 @@ public class DocumentCriteria {
this.tagIdList = tagIdList; this.tagIdList = tagIdList;
} }
public List<List<String>> getExcludedTagIdList() {
return excludedTagIdList;
}
public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
this.excludedTagIdList = excludedTagIdList;
return this;
}
public Boolean getShared() { public Boolean getShared() {
return shared; return shared;
} }
@ -166,4 +186,12 @@ public class DocumentCriteria {
public void setActiveRoute(Boolean activeRoute) { public void setActiveRoute(Boolean activeRoute) {
this.activeRoute = activeRoute; this.activeRoute = activeRoute;
} }
public String getMimeType() {
return mimeType;
}
public void setMimeType(String mimeType) {
this.mimeType = mimeType;
}
} }
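The new exclusion and MIME type criteria could be combined as follows when building a search (a sketch with a placeholder tag ID; java.util.Collections and java.util.List imports assumed, and the search syntax feeding these values is not shown here):
DocumentCriteria criteria = new DocumentCriteria();
criteria.setMimeType("application/pdf");
List<List<String>> excluded = Collections.singletonList(Collections.singletonList("tag-id"));
criteria.setExcludedTagIdList(excluded);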

View File

@ -0,0 +1,9 @@
package com.sismics.docs.core.dao.criteria;
/**
* Metadata criteria.
*
* @author bgamard
*/
public class MetadataCriteria {
}

View File

@ -1,10 +1,25 @@
package com.sismics.docs.core.dao.criteria; package com.sismics.docs.core.dao.criteria;
import java.util.List;
/** /**
* Route model criteria. * Route model criteria.
* *
* @author bgamard * @author bgamard
*/ */
public class RouteModelCriteria { public class RouteModelCriteria {
/**
* ACL target ID list.
*/
private List<String> targetIdList;
public List<String> getTargetIdList() {
return targetIdList;
}
public RouteModelCriteria setTargetIdList(List<String> targetIdList) {
this.targetIdList = targetIdList;
return this;
}
} }
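With the ACL join added to RouteModelDao above, fetching only the route models readable by the current user would presumably look like this (assuming RouteModelDao exposes a findByCriteria(criteria, sortCriteria) method like the other DAOs in this diff; targetIdList would be the user ID plus group IDs):
List<RouteModelDto> routeModels = new RouteModelDao().findByCriteria(
        new RouteModelCriteria().setTargetIdList(targetIdList), new SortCriteria(1, true));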

View File

@ -23,11 +23,6 @@ public class TagCriteria {
*/ */
private String documentId; private String documentId;
/**
* Tag name.
*/
private String name;
public String getId() { public String getId() {
return id; return id;
} }

View File

@ -11,6 +11,11 @@ public class DocumentDto {
*/ */
private String id; private String id;
/**
* Main file ID.
*/
private String fileId;
/** /**
* Title. * Title.
*/ */
@ -114,6 +119,15 @@ public class DocumentDto {
this.id = id; this.id = id;
} }
public String getFileId() {
return fileId;
}
public DocumentDto setFileId(String fileId) {
this.fileId = fileId;
return this;
}
public String getTitle() { public String getTitle() {
return title; return title;
} }

View File

@ -0,0 +1,94 @@
package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.MetadataType;
/**
* Document metadata DTO.
*
* @author bgamard
*/
public class DocumentMetadataDto {
/**
* Document metadata ID.
*/
private String id;
/**
* Document ID.
*/
private String documentId;
/**
* Metadata ID.
*/
private String metadataId;
/**
* Name.
*/
private String name;
/**
* Value.
*/
private String value;
/**
* Type.
*/
private MetadataType type;
public String getId() {
return id;
}
public DocumentMetadataDto setId(String id) {
this.id = id;
return this;
}
public String getName() {
return name;
}
public DocumentMetadataDto setName(String name) {
this.name = name;
return this;
}
public MetadataType getType() {
return type;
}
public DocumentMetadataDto setType(MetadataType type) {
this.type = type;
return this;
}
public String getDocumentId() {
return documentId;
}
public DocumentMetadataDto setDocumentId(String documentId) {
this.documentId = documentId;
return this;
}
public String getMetadataId() {
return metadataId;
}
public DocumentMetadataDto setMetadataId(String metadataId) {
this.metadataId = metadataId;
return this;
}
public String getValue() {
return value;
}
public DocumentMetadataDto setValue(String value) {
this.value = value;
return this;
}
}

View File

@ -0,0 +1,52 @@
package com.sismics.docs.core.dao.dto;
import com.sismics.docs.core.constant.MetadataType;
/**
* Metadata DTO.
*
* @author bgamard
*/
public class MetadataDto {
/**
* Metadata ID.
*/
private String id;
/**
* Name.
*/
private String name;
/**
* Type.
*/
private MetadataType type;
public String getId() {
return id;
}
public MetadataDto setId(String id) {
this.id = id;
return this;
}
public String getName() {
return name;
}
public MetadataDto setName(String name) {
this.name = name;
return this;
}
public MetadataType getType() {
return type;
}
public MetadataDto setType(MetadataType type) {
this.type = type;
return this;
}
}

View File

@ -1,7 +1,6 @@
package com.sismics.docs.core.event; package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.Document;
/** /**
* Document created event. * Document created event.
@ -10,32 +9,22 @@ import com.sismics.docs.core.model.jpa.Document;
*/ */
public class DocumentCreatedAsyncEvent extends UserEvent { public class DocumentCreatedAsyncEvent extends UserEvent {
/** /**
* Created document. * Document ID.
*/ */
private Document document; private String documentId;
/** public String getDocumentId() {
* Getter of document. return documentId;
*
* @return the document
*/
public Document getDocument() {
return document;
} }
/** public void setDocumentId(String documentId) {
* Setter of document. this.documentId = documentId;
*
* @param document document
*/
public void setDocument(Document document) {
this.document = document;
} }
@Override @Override
public String toString() { public String toString() {
return MoreObjects.toStringHelper(this) return MoreObjects.toStringHelper(this)
.add("document", document) .add("documentId", documentId)
.toString(); .toString();
} }
} }

View File

@ -1,7 +1,6 @@
package com.sismics.docs.core.event; package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
/** /**
* File deleted event. * File deleted event.
@ -10,22 +9,22 @@ import com.sismics.docs.core.model.jpa.File;
*/ */
public class FileDeletedAsyncEvent extends UserEvent { public class FileDeletedAsyncEvent extends UserEvent {
/** /**
* Deleted file. * File ID.
*/ */
private File file; private String fileId;
public File getFile() { public String getFileId() {
return file; return fileId;
} }
public void setFile(File file) { public void setFileId(String fileId) {
this.file = file; this.fileId = fileId;
} }
@Override @Override
public String toString() { public String toString() {
return MoreObjects.toStringHelper(this) return MoreObjects.toStringHelper(this)
.add("file", file) .add("fileId", fileId)
.toString(); .toString();
} }
} }

View File

@ -1,7 +1,6 @@
package com.sismics.docs.core.event; package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
import java.nio.file.Path; import java.nio.file.Path;
@ -12,9 +11,9 @@ import java.nio.file.Path;
*/ */
public abstract class FileEvent extends UserEvent { public abstract class FileEvent extends UserEvent {
/** /**
* Created file. * File ID.
*/ */
private File file; private String fileId;
/** /**
* Language of the file. * Language of the file.
@ -26,12 +25,12 @@ public abstract class FileEvent extends UserEvent {
*/ */
private Path unencryptedFile; private Path unencryptedFile;
public File getFile() { public String getFileId() {
return file; return fileId;
} }
public void setFile(File file) { public void setFileId(String fileId) {
this.file = file; this.fileId = fileId;
} }
public String getLanguage() { public String getLanguage() {
@ -54,7 +53,7 @@ public abstract class FileEvent extends UserEvent {
@Override @Override
public String toString() { public String toString() {
return MoreObjects.toStringHelper(this) return MoreObjects.toStringHelper(this)
.add("file", file) .add("fileId", fileId)
.add("language", language) .add("language", language)
.toString(); .toString();
} }

View File

@ -3,9 +3,11 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents; import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe; import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.ContributorDao; import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent; import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext; import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor; import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.TransactionUtil; import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -34,15 +36,22 @@ public class DocumentCreatedAsyncListener {
} }
TransactionUtil.handle(() -> { TransactionUtil.handle(() -> {
// Fetch a fresh document
Document document = new DocumentDao().getById(event.getDocumentId());
if (document == null) {
// The document has been deleted since
return;
}
// Add the first contributor (the creator of the document) // Add the first contributor (the creator of the document)
ContributorDao contributorDao = new ContributorDao(); ContributorDao contributorDao = new ContributorDao();
Contributor contributor = new Contributor(); Contributor contributor = new Contributor();
contributor.setDocumentId(event.getDocument().getId()); contributor.setDocumentId(event.getDocumentId());
contributor.setUserId(event.getUserId()); contributor.setUserId(event.getUserId());
contributorDao.create(contributor); contributorDao.create(contributor);
// Update index // Update index
AppContext.getInstance().getIndexingHandler().createDocument(event.getDocument()); AppContext.getInstance().getIndexingHandler().createDocument(document);
}); });
} }
} }

View File

@ -4,10 +4,12 @@ import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe; import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.ContributorDao; import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao; import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.FileDao;
import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent; import com.sismics.docs.core.event.DocumentUpdatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext; import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor; import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.model.jpa.Document; import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.TransactionUtil; import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@ -38,13 +40,25 @@ public class DocumentUpdatedAsyncListener {
} }
TransactionUtil.handle(() -> { TransactionUtil.handle(() -> {
// Update index // Get the document
DocumentDao documentDao = new DocumentDao(); DocumentDao documentDao = new DocumentDao();
Document document = documentDao.getById(event.getDocumentId()); Document document = documentDao.getById(event.getDocumentId());
if (document == null) { if (document == null) {
// Document deleted since event fired // Document deleted since event fired
return; return;
} }
// Set the main file
FileDao fileDao = new FileDao();
List<File> fileList = fileDao.getByDocumentId(null, event.getDocumentId());
if (fileList.isEmpty()) {
document.setFileId(null);
} else {
document.setFileId(fileList.get(0).getId());
}
// Update database and index
documentDao.updateFileId(document);
AppContext.getInstance().getIndexingHandler().updateDocument(document); AppContext.getInstance().getIndexingHandler().updateDocument(document);
// Update contributors list // Update contributors list

View File

@ -4,7 +4,6 @@ import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe; import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.event.FileDeletedAsyncEvent; import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext; import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.FileUtil; import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil; import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -35,12 +34,11 @@ public class FileDeletedAsyncListener {
} }
// Delete the file from storage // Delete the file from storage
File file = event.getFile(); FileUtil.delete(event.getFileId());
FileUtil.delete(file);
TransactionUtil.handle(() -> { TransactionUtil.handle(() -> {
// Update index // Update index
AppContext.getInstance().getIndexingHandler().deleteDocument(file.getId()); AppContext.getInstance().getIndexingHandler().deleteDocument(event.getFileId());
}); });
} }
} }

View File

@ -28,6 +28,7 @@ import java.io.OutputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.concurrent.atomic.AtomicReference;
/** /**
* Listener on file processing. * Listener on file processing.
@ -52,15 +53,7 @@ public class FileProcessingAsyncListener {
log.info("File created event: " + event.toString()); log.info("File created event: " + event.toString());
} }
TransactionUtil.handle(() -> { processFile(event, true);
// Generate thumbnail, extract content
processFile(event);
// Update index
AppContext.getInstance().getIndexingHandler().createFile(event.getFile());
});
FileUtil.endProcessingFile(event.getFile().getId());
} }
/** /**
@ -71,44 +64,85 @@ public class FileProcessingAsyncListener {
@Subscribe @Subscribe
@AllowConcurrentEvents @AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) { public void on(final FileUpdatedAsyncEvent event) {
if (log.isInfoEnabled()) {
log.info("File updated event: " + event.toString()); log.info("File updated event: " + event.toString());
}
TransactionUtil.handle(() -> { processFile(event, false);
// Generate thumbnail, extract content
processFile(event);
// Update index
AppContext.getInstance().getIndexingHandler().updateFile(event.getFile());
});
FileUtil.endProcessingFile(event.getFile().getId());
} }
/** /**
* Process the file (create/update). * Process a file:
* Generate thumbnails
* Extract and save text content
* *
* @param event File event * @param event File event
* @param isFileCreated True if the file was just created
*/ */
private void processFile(FileEvent event) { private void processFile(FileEvent event, boolean isFileCreated) {
// Find a format handler AtomicReference<File> file = new AtomicReference<>();
final File file = event.getFile(); AtomicReference<User> user = new AtomicReference<>();
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
if (formatHandler == null) { // Open a first transaction to get what we need to start the processing
log.error("Format unhandled: " + file.getMimeType()); TransactionUtil.handle(() -> {
FileUtil.endProcessingFile(file.getId()); // Generate thumbnail, extract content
file.set(new FileDao().getActiveById(event.getFileId()));
if (file.get() == null) {
// The file has been deleted since
return; return;
} }
// Get the user from the database // Get the creating user from the database for its private key
UserDao userDao = new UserDao(); UserDao userDao = new UserDao();
User user = userDao.getById(event.getUserId()); user.set(userDao.getById(file.get().getUserId()));
if (user == null) { });
// The user has been deleted meanwhile
FileUtil.endProcessingFile(file.getId()); // Process the file outside of a transaction
if (user.get() == null || file.get() == null) {
// The user or file has been deleted
FileUtil.endProcessingFile(event.getFileId());
return; return;
} }
String content = extractContent(event, user.get(), file.get());
// Open a new transaction to save the file content
TransactionUtil.handle(() -> {
// Save the file to database
FileDao fileDao = new FileDao();
File freshFile = fileDao.getActiveById(event.getFileId());
if (freshFile == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
freshFile.setContent(content);
fileDao.update(freshFile);
// Update index with the updated file
if (isFileCreated) {
AppContext.getInstance().getIndexingHandler().createFile(freshFile);
} else {
AppContext.getInstance().getIndexingHandler().updateFile(freshFile);
}
});
FileUtil.endProcessingFile(event.getFileId());
}
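The AtomicReference holders are needed because the entities are fetched inside a lambda passed to TransactionUtil.handle, and a lambda can only write into effectively final holders, not plain local variables. A stripped-down, standalone illustration of the same capture pattern (the transaction helper is faked here):
import java.util.concurrent.atomic.AtomicReference;
public class TransactionCaptureDemo {
    // Stand-in for TransactionUtil.handle: runs the work where a commit would wrap it.
    static void inTransaction(Runnable work) {
        work.run();
    }
    public static void main(String[] args) {
        AtomicReference<String> file = new AtomicReference<>();
        // First transaction: load what the long-running processing needs.
        inTransaction(() -> file.set("file-123"));
        if (file.get() == null) {
            return; // deleted in the meantime
        }
        // Heavy work (thumbnails, OCR) runs outside any transaction.
        String content = "extracted text for " + file.get();
        // Second transaction: re-fetch fresh state and persist the result.
        inTransaction(() -> System.out.println("saving content: " + content));
    }
}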
/**
* Extract text content from a file.
* This is executed outside of a transaction.
*
* @param event File event
* @param user User who created the file
* @param file Fresh file
* @return Text content
*/
private String extractContent(FileEvent event, User user, File file) {
// Find a format handler
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
if (formatHandler == null) {
log.info("Format unhandled: " + file.getMimeType());
return null;
}
// Generate file variations // Generate file variations
try { try {
@ -132,28 +166,21 @@ public class FileProcessingAsyncListener {
ImageUtil.writeJpeg(thumbnail, outputStream); ImageUtil.writeJpeg(thumbnail, outputStream);
} }
} }
} catch (Exception e) { } catch (Throwable e) {
log.error("Unable to generate thumbnails", e); log.error("Unable to generate thumbnails for: " + file, e);
} }
// Extract text content from the file // Extract text content from the file
long startTime = System.currentTimeMillis(); long startTime = System.currentTimeMillis();
String content = null; String content = null;
log.info("Start extracting content from: " + file);
try { try {
content = formatHandler.extractContent(event.getLanguage(), event.getUnencryptedFile()); content = formatHandler.extractContent(event.getLanguage(), event.getUnencryptedFile());
} catch (Exception e) { } catch (Throwable e) {
log.error("Error extracting content from: " + event.getFile(), e); log.error("Error extracting content from: " + file, e);
} }
log.info(MessageFormat.format("File content extracted in {0}ms", System.currentTimeMillis() - startTime)); log.info(MessageFormat.format("File content extracted in {0}ms: " + file.getId(), System.currentTimeMillis() - startTime));
// Save the file to database return content;
FileDao fileDao = new FileDao();
if (fileDao.getActiveById(file.getId()) == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
file.setContent(content);
fileDao.update(file);
} }
} }

View File

@ -37,18 +37,35 @@ public class RebuildIndexAsyncListener {
log.info("Rebuild index event: " + event.toString()); log.info("Rebuild index event: " + event.toString());
} }
// Fetch all documents and files // Clear the index
AppContext.getInstance().getIndexingHandler().clearIndex();
// Index all documents
TransactionUtil.handle(() -> { TransactionUtil.handle(() -> {
// Fetch all documents int offset = 0;
DocumentDao documentDao = new DocumentDao(); DocumentDao documentDao = new DocumentDao();
List<Document> documentList = documentDao.findAll(); List<Document> documentList;
do {
// Fetch all files documentList = documentDao.findAll(offset, 100);
FileDao fileDao = new FileDao(); AppContext.getInstance().getIndexingHandler().createDocuments(documentList);
List<File> fileList = fileDao.findAll(); offset += 100;
} while (documentList.size() > 0);
// Rebuild index
AppContext.getInstance().getIndexingHandler().rebuildIndex(documentList, fileList);
}); });
// Index all files
TransactionUtil.handle(() -> {
int offset = 0;
FileDao fileDao = new FileDao();
List<File> fileList;
do {
fileList = fileDao.findAll(offset, 100);
AppContext.getInstance().getIndexingHandler().createFiles(fileList);
offset += 100;
} while (fileList.size() > 0);
});
if (log.isInfoEnabled()) {
log.info("Rebuilding index done");
}
} }
} }

View File

@ -36,7 +36,7 @@ public class WebhookAsyncListener {
@Subscribe @Subscribe
@AllowConcurrentEvents @AllowConcurrentEvents
public void on(final DocumentCreatedAsyncEvent event) { public void on(final DocumentCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocument().getId()); triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocumentId());
} }
@Subscribe @Subscribe
@ -54,19 +54,19 @@ public class WebhookAsyncListener {
@Subscribe @Subscribe
@AllowConcurrentEvents @AllowConcurrentEvents
public void on(final FileCreatedAsyncEvent event) { public void on(final FileCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFile().getId()); triggerWebhook(WebhookEvent.FILE_CREATED, event.getFileId());
} }
@Subscribe @Subscribe
@AllowConcurrentEvents @AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) { public void on(final FileUpdatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFile().getId()); triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFileId());
} }
@Subscribe @Subscribe
@AllowConcurrentEvents @AllowConcurrentEvents
public void on(final FileDeletedAsyncEvent event) { public void on(final FileDeletedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFile().getId()); triggerWebhook(WebhookEvent.FILE_DELETED, event.getFileId());
} }
/** /**

View File

@ -5,7 +5,6 @@ import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus; import com.google.common.eventbus.EventBus;
import com.sismics.docs.core.constant.Constants; import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.UserDao; import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.listener.async.*; import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.model.jpa.User; import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.FileService; import com.sismics.docs.core.service.FileService;
@ -89,9 +88,7 @@ public class AppContext {
} }
indexingHandler.startUp(); indexingHandler.startUp();
} catch (Exception e) { } catch (Exception e) {
log.error("Error starting the indexing handler, rebuilding the index: " + e.getMessage()); log.error("Error starting the indexing handler", e);
RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
asyncEventBus.post(rebuildIndexAsyncEvent);
} }
// Start file service // Start file service
@ -174,7 +171,8 @@ public class AppContext {
if (EnvironmentUtil.isUnitTest()) { if (EnvironmentUtil.isUnitTest()) {
return new EventBus(); return new EventBus();
} else { } else {
ThreadPoolExecutor executor = new ThreadPoolExecutor(8, 8, int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
1L, TimeUnit.MINUTES, 1L, TimeUnit.MINUTES,
new LinkedBlockingQueue<>()); new LinkedBlockingQueue<>());
asyncExecutorList.add(executor); asyncExecutorList.add(executor);
@ -190,7 +188,7 @@ public class AppContext {
public int getQueuedTaskCount() { public int getQueuedTaskCount() {
int queueSize = 0; int queueSize = 0;
for (ThreadPoolExecutor executor : asyncExecutorList) { for (ThreadPoolExecutor executor : asyncExecutorList) {
queueSize += executor.getQueue().size(); queueSize += executor.getTaskCount() - executor.getCompletedTaskCount();
} }
return queueSize; return queueSize;
} }
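The executor is now sized from the number of available cores, and the pending-work metric counts everything scheduled but not yet completed, which, unlike the queue size, also includes tasks currently running. A small runnable sketch of the same arithmetic:
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
public class ExecutorSizingDemo {
    public static void main(String[] args) {
        // Half the cores, but never fewer than two worker threads.
        int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
        ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
                1L, TimeUnit.MINUTES, new LinkedBlockingQueue<>());
        executor.submit(() -> { });
        // Scheduled minus completed = queued + currently running.
        long pending = executor.getTaskCount() - executor.getCompletedTaskCount();
        System.out.println(threadCount + " threads, pending tasks: " + pending);
        executor.shutdown();
    }
}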

View File

@ -29,6 +29,12 @@ public class Document implements Loggable {
@Column(name = "DOC_IDUSER_C", nullable = false, length = 36) @Column(name = "DOC_IDUSER_C", nullable = false, length = 36)
private String userId; private String userId;
/**
* Main file ID.
*/
@Column(name = "DOC_IDFILE_C", length = 36)
private String fileId;
/** /**
* Language (ISO 639-9). * Language (ISO 639-9).
*/ */
@ -137,6 +143,15 @@ public class Document implements Loggable {
this.userId = userId; this.userId = userId;
} }
public String getFileId() {
return fileId;
}
public Document setFileId(String fileId) {
this.fileId = fileId;
return this;
}
public String getTitle() { public String getTitle() {
return title; return title;
} }

View File

@ -0,0 +1,91 @@
package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
/**
* Link between a document and a metadata, holding the value.
*
* @author bgamard
*/
@Entity
@Table(name = "T_DOCUMENT_METADATA")
public class DocumentMetadata implements Serializable {
/**
* Serial version UID.
*/
private static final long serialVersionUID = 1L;
/**
* Document metadata ID.
*/
@Id
@Column(name = "DME_ID_C", length = 36)
private String id;
/**
* Document ID.
*/
@Column(name = "DME_IDDOCUMENT_C", nullable = false, length = 36)
private String documentId;
/**
* Metadata ID.
*/
@Column(name = "DME_IDMETADATA_C", nullable = false, length = 36)
private String metadataId;
/**
* Value.
*/
@Column(name = "DME_VALUE_C", length = 4000)
private String value;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDocumentId() {
return documentId;
}
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
public String getMetadataId() {
return metadataId;
}
public DocumentMetadata setMetadataId(String metadataId) {
this.metadataId = metadataId;
return this;
}
public String getValue() {
return value;
}
public DocumentMetadata setValue(String value) {
this.value = value;
return this;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("id", id)
.add("documentId", documentId)
.add("metadataId", metadataId)
.toString();
}
}

View File

@ -49,7 +49,6 @@ public class File implements Loggable {
/** /**
* OCR-ized content. * OCR-ized content.
*/ */
@Lob
@Column(name = "FIL_CONTENT_C") @Column(name = "FIL_CONTENT_C")
private String content; private String content;
@ -71,6 +70,24 @@ public class File implements Loggable {
@Column(name = "FIL_ORDER_N") @Column(name = "FIL_ORDER_N")
private Integer order; private Integer order;
/**
* Version ID.
*/
@Column(name = "FIL_IDVERSION_C")
private String versionId;
/**
* Version number (starting at 0).
*/
@Column(name = "FIL_VERSION_N", nullable = false)
private Integer version;
/**
* True if it's the latest version of the file.
*/
@Column(name = "FIL_LATESTVERSION_B", nullable = false)
private boolean latestVersion;
/** /**
* Private key to decrypt the file. * Private key to decrypt the file.
* Not saved to database, of course. * Not saved to database, of course.
@ -160,6 +177,33 @@ public class File implements Loggable {
this.privateKey = privateKey; this.privateKey = privateKey;
} }
public String getVersionId() {
return versionId;
}
public File setVersionId(String versionId) {
this.versionId = versionId;
return this;
}
public Integer getVersion() {
return version;
}
public File setVersion(Integer version) {
this.version = version;
return this;
}
public boolean isLatestVersion() {
return latestVersion;
}
public File setLatestVersion(boolean latestVersion) {
this.latestVersion = latestVersion;
return this;
}
@Override @Override
public String toString() { public String toString() {
return MoreObjects.toStringHelper(this) return MoreObjects.toStringHelper(this)
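How these versioning columns might be populated when a new revision of an existing file is uploaded; this is a sketch under assumptions, since the actual upload and version-bump path is not part of this excerpt:
File newRevision = new File();
newRevision.setVersionId(previousFile.getVersionId());   // keep the same version group
newRevision.setVersion(previousFile.getVersion() + 1);   // next revision number
newRevision.setLatestVersion(true);
previousFile.setLatestVersion(false);                    // only the newest revision is flagged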

View File

@ -0,0 +1,92 @@
package com.sismics.docs.core.model.jpa;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.MetadataType;
import javax.persistence.*;
import java.util.Date;
/**
* Metadata entity.
*
* @author bgamard
*/
@Entity
@Table(name = "T_METADATA")
public class Metadata implements Loggable {
/**
* Metadata ID.
*/
@Id
@Column(name = "MET_ID_C", length = 36)
private String id;
/**
* Name.
*/
@Column(name = "MET_NAME_C", length = 50, nullable = false)
private String name;
/**
* Type.
*/
@Column(name = "MET_TYPE_C", length = 20, nullable = false)
@Enumerated(EnumType.STRING)
private MetadataType type;
/**
* Deletion date.
*/
@Column(name = "MET_DELETEDATE_D")
private Date deleteDate;
public String getId() {
return id;
}
public Metadata setId(String id) {
this.id = id;
return this;
}
public String getName() {
return name;
}
public Metadata setName(String name) {
this.name = name;
return this;
}
public MetadataType getType() {
return type;
}
public Metadata setType(MetadataType type) {
this.type = type;
return this;
}
@Override
public Date getDeleteDate() {
return deleteDate;
}
public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("id", id)
.add("name", name)
.add("type", type)
.toString();
}
@Override
public String toMessage() {
return name;
}
}

View File

@ -47,6 +47,12 @@ public class User implements Loggable {
@Column(name = "USE_PRIVATEKEY_C", nullable = false, length = 100) @Column(name = "USE_PRIVATEKEY_C", nullable = false, length = 100)
private String privateKey; private String privateKey;
/**
* False once the user has completed the onboarding.
*/
@Column(name = "USE_ONBOARDING_B", nullable = false)
private boolean onboarding;
/** /**
* TOTP secret key. * TOTP secret key.
*/ */
@ -198,6 +204,15 @@ public class User implements Loggable {
return this; return this;
} }
public boolean isOnboarding() {
return onboarding;
}
public User setOnboarding(boolean onboarding) {
this.onboarding = onboarding;
return this;
}
@Override @Override
public String toString() { public String toString() {
return MoreObjects.toStringHelper(this) return MoreObjects.toStringHelper(this)

View File

@ -85,7 +85,7 @@ public class FileService extends AbstractScheduledService {
* *
* @author bgamard * @author bgamard
*/ */
class TemporaryPathReference extends PhantomReference<Path> { static class TemporaryPathReference extends PhantomReference<Path> {
String path; String path;
TemporaryPathReference(Path referent, ReferenceQueue<? super Path> q) { TemporaryPathReference(Path referent, ReferenceQueue<? super Path> q) {
super(referent, q); super(referent, q);

View File

@ -1,16 +1,21 @@
package com.sismics.docs.core.service; package com.sismics.docs.core.service;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService; import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType; import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao; import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent; import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document; import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag; import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.ConfigUtil; import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DocumentUtil; import com.sismics.docs.core.util.DocumentUtil;
import com.sismics.docs.core.util.FileUtil; import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil; import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.EmailUtil; import com.sismics.util.EmailUtil;
import com.sismics.util.context.ThreadLocalContext; import com.sismics.util.context.ThreadLocalContext;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
@ -19,9 +24,10 @@ import org.slf4j.LoggerFactory;
import javax.mail.*; import javax.mail.*;
import javax.mail.search.FlagTerm; import javax.mail.search.FlagTerm;
import java.util.Date; import java.util.*;
import java.util.Properties;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** /**
* Inbox scanning service. * Inbox scanning service.
@ -79,11 +85,13 @@ public class InboxService extends AbstractScheduledService {
lastSyncDate = new Date(); lastSyncDate = new Date();
lastSyncMessageCount = 0; lastSyncMessageCount = 0;
try { try {
HashMap<String, String> tagsNameToId = getAllTags();
inbox = openInbox(); inbox = openInbox();
Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false)); Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
log.info(messages.length + " messages found"); log.info(messages.length + " messages found");
for (Message message : messages) { for (Message message : messages) {
importMessage(message); importMessage(message, tagsNameToId);
lastSyncMessageCount++; lastSyncMessageCount++;
} }
} catch (FolderClosedException e) { } catch (FolderClosedException e) {
@ -94,7 +102,8 @@ public class InboxService extends AbstractScheduledService {
} finally { } finally {
try { try {
if (inbox != null) { if (inbox != null) {
inbox.close(false); // The parameter controls whether messages flagged for deletion are actually deleted.
inbox.close(ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED));
inbox.getStore().close(); inbox.getStore().close();
} }
} catch (Exception e) { } catch (Exception e) {
@ -183,7 +192,7 @@ public class InboxService extends AbstractScheduledService {
* @param message Message * @param message Message
* @throws Exception e * @throws Exception e
*/ */
private void importMessage(Message message) throws Exception { private void importMessage(Message message, HashMap<String, String> tags) throws Exception {
log.info("Importing message: " + message.getSubject()); log.info("Importing message: " + message.getSubject());
// Parse the mail // Parse the mail
@ -194,12 +203,27 @@ public class InboxService extends AbstractScheduledService {
// Create the document // Create the document
Document document = new Document(); Document document = new Document();
document.setUserId("admin"); String subject = mailContent.getSubject();
if (mailContent.getSubject() == null) { if (subject == null) {
document.setTitle("Imported email from EML file"); subject = "Imported email from EML file";
} else {
document.setTitle(StringUtils.abbreviate(mailContent.getSubject(), 100));
} }
HashSet<String> tagsFound = new HashSet<>();
if (tags != null) {
Pattern pattern = Pattern.compile("#([^\\s:#]+)");
Matcher matcher = pattern.matcher(subject);
while (matcher.find()) {
if (tags.containsKey(matcher.group(1)) && tags.get(matcher.group(1)) != null) {
tagsFound.add(tags.get(matcher.group(1)));
subject = subject.replaceFirst("#" + matcher.group(1), "");
}
}
log.debug("Tags found: " + String.join(", ", tagsFound));
subject = subject.trim().replaceAll(" +", " ");
}
document.setUserId("admin");
document.setTitle(StringUtils.abbreviate(subject, 100));
document.setDescription(StringUtils.abbreviate(mailContent.getMessage(), 4000)); document.setDescription(StringUtils.abbreviate(mailContent.getMessage(), 4000));
document.setSubject(StringUtils.abbreviate(mailContent.getSubject(), 500)); document.setSubject(StringUtils.abbreviate(mailContent.getSubject(), 500));
document.setFormat("EML"); document.setFormat("EML");
@ -212,7 +236,7 @@ public class InboxService extends AbstractScheduledService {
} }
// Save the document, create the base ACLs // Save the document, create the base ACLs
document = DocumentUtil.createDocument(document, "admin"); DocumentUtil.createDocument(document, "admin");
// Add the tag // Add the tag
String tagId = ConfigUtil.getConfigStringValue(ConfigType.INBOX_TAG); String tagId = ConfigUtil.getConfigStringValue(ConfigType.INBOX_TAG);
@ -220,21 +244,49 @@ public class InboxService extends AbstractScheduledService {
TagDao tagDao = new TagDao(); TagDao tagDao = new TagDao();
Tag tag = tagDao.getById(tagId); Tag tag = tagDao.getById(tagId);
if (tag != null) { if (tag != null) {
tagDao.updateTagList(document.getId(), Sets.newHashSet(tagId)); tagsFound.add(tagId);
} }
} }
// Update tags
if (!tagsFound.isEmpty()) {
new TagDao().updateTagList(document.getId(), tagsFound);
}
// Raise a document created event // Raise a document created event
DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent(); DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent();
documentCreatedAsyncEvent.setUserId("admin"); documentCreatedAsyncEvent.setUserId("admin");
documentCreatedAsyncEvent.setDocument(document); documentCreatedAsyncEvent.setDocumentId(document.getId());
ThreadLocalContext.get().addAsyncEvent(documentCreatedAsyncEvent); ThreadLocalContext.get().addAsyncEvent(documentCreatedAsyncEvent);
// Add files to the document // Add files to the document
for (EmailUtil.FileContent fileContent : mailContent.getFileContentList()) { for (EmailUtil.FileContent fileContent : mailContent.getFileContentList()) {
FileUtil.createFile(fileContent.getName(), fileContent.getFile(), fileContent.getSize(), FileUtil.createFile(fileContent.getName(), null, fileContent.getFile(), fileContent.getSize(),
document.getLanguage(), "admin", document.getId()); document.getLanguage(), "admin", document.getId());
} }
if (ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED)) {
message.setFlag(Flags.Flag.DELETED, true);
}
}
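The inbox scanner now recognizes `#tag` tokens in the mail subject, maps them to existing tags and strips them from the title. A minimal standalone sketch of that parsing step using the same regex as above (the tag names and IDs are made-up examples):

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SubjectTagExtractor {
    public static void main(String[] args) {
        // Hypothetical name -> ID map, as returned by getAllTags()
        Map<String, String> tags = new HashMap<>();
        tags.put("invoice", "tag-id-1");
        tags.put("2020", "tag-id-2");

        String subject = "Electricity bill #invoice #2020";
        Set<String> tagIdsFound = new HashSet<>();

        // Same pattern as above: '#' followed by anything except whitespace, ':' or '#'
        Pattern pattern = Pattern.compile("#([^\\s:#]+)");
        Matcher matcher = pattern.matcher(subject);
        while (matcher.find()) {
            String name = matcher.group(1);
            if (tags.get(name) != null) {
                tagIdsFound.add(tags.get(name));
                subject = subject.replaceFirst("#" + name, "");
            }
        }
        // Collapse the whitespace left behind by the removed tokens
        subject = subject.trim().replaceAll(" +", " ");

        System.out.println(subject);      // Electricity bill
        System.out.println(tagIdsFound);  // tag-id-1 and tag-id-2, in unspecified order
    }
}
```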
/**
* Fetches a HashMap with all tag names as keys and their respective ids as values.
*
* @return HashMap with all tags or null if not enabled
*/
private HashMap<String, String> getAllTags() {
if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
return null;
}
TagDao tagDao = new TagDao();
List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
HashMap<String, String> tagsNameToId = new HashMap<>();
for (TagDto tagDto : tags) {
tagsNameToId.put(tagDto.getName(), tagDto.getId());
}
return tagsNameToId;
} }
public Date getLastSyncDate() { public Date getLastSyncDate() {


@ -50,6 +50,19 @@ public class ConfigUtil {
return Integer.parseInt(value); return Integer.parseInt(value);
} }
/**
* Returns the long value of a configuration parameter.
*
* @param configType Type of the configuration parameter
* @return Long value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
*/
public static long getConfigLongValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Long.parseLong(value);
}
/** /**
* Returns the boolean value of a configuration parameter. * Returns the boolean value of a configuration parameter.
* *


@ -22,9 +22,9 @@ public class DirectoryUtil {
*/ */
public static Path getBaseDataDirectory() { public static Path getBaseDataDirectory() {
Path baseDataDir = null; Path baseDataDir = null;
if (StringUtils.isNotBlank(EnvironmentUtil.getDocsHome())) { if (StringUtils.isNotBlank(EnvironmentUtil.getTeedyHome())) {
// If the docs.home property is set then use it // If the docs.home property is set then use it
baseDataDir = Paths.get(EnvironmentUtil.getDocsHome()); baseDataDir = Paths.get(EnvironmentUtil.getTeedyHome());
} else if (EnvironmentUtil.isUnitTest()) { } else if (EnvironmentUtil.isUnitTest()) {
// For unit testing, use a temporary directory // For unit testing, use a temporary directory
baseDataDir = Paths.get(System.getProperty("java.io.tmpdir")); baseDataDir = Paths.get(System.getProperty("java.io.tmpdir"));


@ -32,6 +32,7 @@ public class EncryptionUtil {
static { static {
// Initialize Bouncy Castle provider // Initialize Bouncy Castle provider
Security.insertProviderAt(new BouncyCastleProvider(), 1); Security.insertProviderAt(new BouncyCastleProvider(), 1);
Security.removeProvider("SunRsaSign");
} }
/** /**


@ -18,6 +18,8 @@ import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread; import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil; import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.crypto.Cipher; import javax.crypto.Cipher;
import javax.crypto.CipherInputStream; import javax.crypto.CipherInputStream;
@ -36,6 +38,11 @@ import java.util.*;
* @author bgamard * @author bgamard
*/ */
public class FileUtil { public class FileUtil {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileUtil.class);
/** /**
* File ID of files currently being processed. * File ID of files currently being processed.
*/ */
@ -76,12 +83,12 @@ public class FileUtil {
/** /**
* Remove a file from the storage filesystem. * Remove a file from the storage filesystem.
* *
* @param file File to delete * @param fileId ID of file to delete
*/ */
public static void delete(File file) throws IOException { public static void delete(String fileId) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(file.getId()); Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
Path webFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_web"); Path webFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_thumb"); Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_thumb");
if (Files.exists(storedFile)) { if (Files.exists(storedFile)) {
Files.delete(storedFile); Files.delete(storedFile);
@ -98,6 +105,7 @@ public class FileUtil {
* Create a new file. * Create a new file.
* *
* @param name File name, can be null * @param name File name, can be null
* @param previousFileId ID of the previous version of the file, if the new file is a new version
* @param unencryptedFile Path to the unencrypted file * @param unencryptedFile Path to the unencrypted file
* @param fileSize File size * @param fileSize File size
* @param language File language, can be null if associated to no document * @param language File language, can be null if associated to no document
@ -106,7 +114,7 @@ public class FileUtil {
* @return File ID * @return File ID
* @throws Exception e * @throws Exception e
*/ */
public static String createFile(String name, Path unencryptedFile, long fileSize, String language, String userId, String documentId) throws Exception { public static String createFile(String name, String previousFileId, Path unencryptedFile, long fileSize, String language, String userId, String documentId) throws Exception {
// Validate mime type // Validate mime type
String mimeType; String mimeType;
try { try {
@ -125,29 +133,52 @@ public class FileUtil {
// Validate global quota // Validate global quota
String globalStorageQuotaStr = System.getenv(Constants.GLOBAL_QUOTA_ENV); String globalStorageQuotaStr = System.getenv(Constants.GLOBAL_QUOTA_ENV);
if (!Strings.isNullOrEmpty(globalStorageQuotaStr)) { if (!Strings.isNullOrEmpty(globalStorageQuotaStr)) {
long globalStorageQuota = Long.valueOf(globalStorageQuotaStr); long globalStorageQuota = Long.parseLong(globalStorageQuotaStr);
long globalStorageCurrent = userDao.getGlobalStorageCurrent(); long globalStorageCurrent = userDao.getGlobalStorageCurrent();
if (globalStorageCurrent + fileSize > globalStorageQuota) { if (globalStorageCurrent + fileSize > globalStorageQuota) {
throw new IOException("QuotaReached"); throw new IOException("QuotaReached");
} }
} }
// Get files of this document // Prepare the file
FileDao fileDao = new FileDao();
int order = 0;
if (documentId != null) {
for (File file : fileDao.getByDocumentId(userId, documentId)) {
file.setOrder(order++);
}
}
// Create the file
File file = new File(); File file = new File();
file.setOrder(order); file.setOrder(0);
file.setVersion(0);
file.setLatestVersion(true);
file.setDocumentId(documentId); file.setDocumentId(documentId);
file.setName(StringUtils.abbreviate(name, 200)); file.setName(StringUtils.abbreviate(name, 200));
file.setMimeType(mimeType); file.setMimeType(mimeType);
file.setUserId(userId); file.setUserId(userId);
// Get files of this document
FileDao fileDao = new FileDao();
if (documentId != null) {
if (previousFileId == null) {
// It's not a new version, so put it in last order
file.setOrder(fileDao.getByDocumentId(userId, documentId).size());
} else {
// It's a new version, update the previous version
File previousFile = fileDao.getActiveById(previousFileId);
if (previousFile == null || !previousFile.getDocumentId().equals(documentId)) {
throw new IOException("Previous version mismatch");
}
if (previousFile.getVersionId() == null) {
previousFile.setVersionId(UUID.randomUUID().toString());
}
// Copy the previous file metadata
file.setOrder(previousFile.getOrder());
file.setVersionId(previousFile.getVersionId());
file.setVersion(previousFile.getVersion() + 1);
// Update the previous file
previousFile.setLatestVersion(false);
fileDao.update(previousFile);
}
}
// Create the file
String fileId = fileDao.create(file, userId); String fileId = fileDao.create(file, userId);
// Save the file // Save the file
@ -166,7 +197,7 @@ public class FileUtil {
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent(); FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(userId); fileCreatedAsyncEvent.setUserId(userId);
fileCreatedAsyncEvent.setLanguage(language); fileCreatedAsyncEvent.setLanguage(language);
fileCreatedAsyncEvent.setFile(file); fileCreatedAsyncEvent.setFileId(file.getId());
fileCreatedAsyncEvent.setUnencryptedFile(unencryptedFile); fileCreatedAsyncEvent.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(fileCreatedAsyncEvent); ThreadLocalContext.get().addAsyncEvent(fileCreatedAsyncEvent);
@ -187,6 +218,7 @@ public class FileUtil {
*/ */
public static void startProcessingFile(String fileId) { public static void startProcessingFile(String fileId) {
processingFileSet.add(fileId); processingFileSet.add(fileId);
log.info("Processing started for file: " + fileId);
} }
/** /**
@ -196,6 +228,7 @@ public class FileUtil {
*/ */
public static void endProcessingFile(String fileId) { public static void endProcessingFile(String fileId) {
processingFileSet.remove(fileId); processingFileSet.remove(fileId);
log.info("Processing ended for file: " + fileId);
} }
/** /**


@ -0,0 +1,196 @@
package com.sismics.docs.core.util;
import com.google.common.collect.Maps;
import com.sismics.docs.core.constant.MetadataType;
import com.sismics.docs.core.dao.DocumentMetadataDao;
import com.sismics.docs.core.dao.MetadataDao;
import com.sismics.docs.core.dao.criteria.MetadataCriteria;
import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
import com.sismics.docs.core.dao.dto.MetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.docs.core.util.jpa.SortCriteria;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import java.text.MessageFormat;
import java.util.List;
import java.util.Map;
/**
* Metadata utilities.
*
* @author bgamard
*/
public class MetadataUtil {
/**
* Update custom metadata on a document.
*
* @param documentId Document ID
* @param metadataIdList Metadata ID list
* @param metadataValueList Metadata value list
*/
public static void updateMetadata(String documentId, List<String> metadataIdList, List<String> metadataValueList) throws Exception {
if (metadataIdList == null || metadataValueList == null || metadataIdList.isEmpty()) {
return;
}
if (metadataIdList.size() != metadataValueList.size()) {
throw new Exception("metadata_id and metadata_value must have the same length");
}
Map<String, String> newValues = Maps.newHashMap();
for (int i = 0; i < metadataIdList.size(); i++) {
newValues.put(metadataIdList.get(i), metadataValueList.get(i));
}
MetadataDao metadataDao = new MetadataDao();
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
List<MetadataDto> metadataDtoList = metadataDao.findByCriteria(new MetadataCriteria(), null);
List<DocumentMetadataDto> documentMetadataDtoList = documentMetadataDao.getByDocumentId(documentId);
// Update existing values
for (DocumentMetadataDto documentMetadataDto : documentMetadataDtoList) {
if (newValues.containsKey(documentMetadataDto.getMetadataId())) {
// Update the value
String value = newValues.get(documentMetadataDto.getMetadataId());
validateValue(documentMetadataDto.getType(), value);
updateValue(documentMetadataDto.getId(), value);
newValues.remove(documentMetadataDto.getMetadataId());
} else {
// Remove the value
updateValue(documentMetadataDto.getId(), null);
}
}
// Create new values
for (Map.Entry<String, String> entry : newValues.entrySet()) {
// Search the metadata definition
MetadataDto metadata = null;
for (MetadataDto metadataDto : metadataDtoList) {
if (metadataDto.getId().equals(entry.getKey())) {
metadata = metadataDto;
break;
}
}
if (metadata == null) {
throw new Exception(MessageFormat.format("Metadata not found: {0}", entry.getKey()));
}
// Add the value
validateValue(metadata.getType(), entry.getValue());
createValue(documentId, entry.getKey(), entry.getValue());
}
}
/**
* Validate a custom metadata value.
*
* @param type Metadata type
* @param value Value
* @throws Exception In case of validation error
*/
private static void validateValue(MetadataType type, String value) throws Exception {
switch (type) {
case STRING:
case BOOLEAN:
return;
case DATE:
try {
Long.parseLong(value);
} catch (NumberFormatException e) {
throw new Exception("Date value not parsable as timestamp");
}
break;
case FLOAT:
try {
Double.parseDouble(value);
} catch (NumberFormatException e) {
throw new Exception("Float value not parsable");
}
break;
case INTEGER:
try {
Integer.parseInt(value);
} catch (NumberFormatException e) {
throw new Exception("Integer value not parsable");
}
break;
}
}
/**
* Create a custom metadata value on a document.
*
* @param documentId Document ID
* @param metadataId Metadata ID
* @param value Value
*/
private static void createValue(String documentId, String metadataId, String value) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
DocumentMetadata documentMetadata = new DocumentMetadata();
documentMetadata.setDocumentId(documentId);
documentMetadata.setMetadataId(metadataId);
documentMetadata.setValue(value);
documentMetadataDao.create(documentMetadata);
}
/**
* Update a custom metadata value.
*
* @param documentMetadataId Document metadata ID
* @param value Value
*/
private static void updateValue(String documentMetadataId, String value) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
DocumentMetadata documentMetadata = new DocumentMetadata();
documentMetadata.setId(documentMetadataId);
documentMetadata.setValue(value);
documentMetadataDao.update(documentMetadata);
}
/**
* Add custom metadata to a JSON response.
*
* @param json JSON
* @param documentId Document ID
*/
public static void addMetadata(JsonObjectBuilder json, String documentId) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
MetadataDao metadataDao = new MetadataDao();
List<MetadataDto> metadataDtoList = metadataDao.findByCriteria(new MetadataCriteria(), new SortCriteria(1, true));
List<DocumentMetadataDto> documentMetadataDtoList = documentMetadataDao.getByDocumentId(documentId);
JsonArrayBuilder metadata = Json.createArrayBuilder();
for (MetadataDto metadataDto : metadataDtoList) {
JsonObjectBuilder meta = Json.createObjectBuilder()
.add("id", metadataDto.getId())
.add("name", metadataDto.getName())
.add("type", metadataDto.getType().name());
for (DocumentMetadataDto documentMetadataDto : documentMetadataDtoList) {
if (documentMetadataDto.getMetadataId().equals(metadataDto.getId())) {
if (documentMetadataDto.getValue() != null) {
switch (metadataDto.getType()) {
case STRING:
meta.add("value", documentMetadataDto.getValue());
break;
case BOOLEAN:
meta.add("value", Boolean.parseBoolean(documentMetadataDto.getValue()));
break;
case DATE:
meta.add("value", Long.parseLong(documentMetadataDto.getValue()));
break;
case FLOAT:
meta.add("value", Double.parseDouble(documentMetadataDto.getValue()));
break;
case INTEGER:
meta.add("value", Integer.parseInt(documentMetadataDto.getValue()));
break;
}
}
}
}
metadata.add(meta);
}
json.add("metadata", metadata);
}
}


@ -6,6 +6,7 @@ import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType; import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.AclDao; import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.DocumentDao; import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.RouteModelDao;
import com.sismics.docs.core.dao.UserDao; import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.UserCriteria; import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.RouteStepDto; import com.sismics.docs.core.dao.dto.RouteStepDto;
@ -15,8 +16,14 @@ import com.sismics.docs.core.event.RouteStepValidateEvent;
import com.sismics.docs.core.model.context.AppContext; import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Acl; import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Document; import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.util.context.ThreadLocalContext; import com.sismics.util.context.ThreadLocalContext;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;
import java.util.List; import java.util.List;
/** /**
@ -87,4 +94,31 @@ public class RoutingUtil {
AppContext.getInstance().getMailEventBus().post(routeStepValidateEvent); AppContext.getInstance().getMailEventBus().post(routeStepValidateEvent);
} }
} }
/**
* Find the first route model name matching a target type and name.
*
* @param targetType Target type
* @param targetName Target name
* @return Route model name or null if none is matching
*/
public static String findRouteModelNameByTargetName(AclTargetType targetType, String targetName) {
RouteModelDao routeModelDao = new RouteModelDao();
List<RouteModel> routeModelList = routeModelDao.findAll();
for (RouteModel routeModel : routeModelList) {
try (JsonReader reader = Json.createReader(new StringReader(routeModel.getSteps()))) {
JsonArray stepsJson = reader.readArray();
for (int order = 0; order < stepsJson.size(); order++) {
JsonObject step = stepsJson.getJsonObject(order);
JsonObject target = step.getJsonObject("target");
AclTargetType routeTargetType = AclTargetType.valueOf(target.getString("type"));
String routeTargetName = target.getString("name");
if (targetType == routeTargetType && targetName.equals(routeTargetName)) {
return routeModel.getName();
}
}
}
}
return null;
}
} }
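Route model steps are stored as a JSON array in the `RTM_STEPS_C` column (the `default-document-review` insert in the SQL migration below shows the full shape). A minimal standalone sketch of walking such an array for its targets, assuming a JSON-P implementation is on the classpath:

```java
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;

public class RouteStepTargetScan {
    public static void main(String[] args) {
        // Shortened example of the stored steps JSON
        String steps = "[{\"type\":\"VALIDATE\","
                + "\"target\":{\"name\":\"administrators\",\"type\":\"GROUP\"},"
                + "\"name\":\"Check the document's metadata\"}]";
        try (JsonReader reader = Json.createReader(new StringReader(steps))) {
            JsonArray stepsJson = reader.readArray();
            for (int order = 0; order < stepsJson.size(); order++) {
                JsonObject target = stepsJson.getJsonObject(order).getJsonObject("target");
                // Prints: GROUP / administrators
                System.out.println(target.getString("type") + " / " + target.getString("name"));
            }
        }
    }
}
```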


@ -48,7 +48,7 @@ public class ProcessFilesAction implements Action {
FileUpdatedAsyncEvent event = new FileUpdatedAsyncEvent(); FileUpdatedAsyncEvent event = new FileUpdatedAsyncEvent();
event.setUserId("admin"); event.setUserId("admin");
event.setLanguage(documentDto.getLanguage()); event.setLanguage(documentDto.getLanguage());
event.setFile(file); event.setFileId(file.getId());
event.setUnencryptedFile(unencryptedFile); event.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(event); ThreadLocalContext.get().addAsyncEvent(event);
} }


@ -0,0 +1,131 @@
package com.sismics.docs.core.util.authentication;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.ClasspathScanner;
import org.apache.commons.pool.impl.GenericObjectPool;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.DefaultLdapConnectionFactory;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapConnectionPool;
import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
/**
* LDAP authentication handler.
*
* @author bgamard
*/
@ClasspathScanner.Priority(50) // Before the internal database
public class LdapAuthenticationHandler implements AuthenticationHandler {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);
/**
* LDAP connection pool.
*/
private static LdapConnectionPool pool;
/**
* Reset the LDAP pool.
*/
public static void reset() {
if (pool != null) {
try {
pool.close();
} catch (Exception e) {
// NOP
}
}
pool = null;
}
/**
* Initialize the LDAP pool.
*/
private static void init() {
ConfigDao configDao = new ConfigDao();
Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
if (pool != null || ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return;
}
LdapConnectionConfig config = new LdapConnectionConfig();
config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
GenericObjectPool.Config poolConfig = new GenericObjectPool.Config();
poolConfig.whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
poolConfig.maxWait = 500;
pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), poolConfig);
}
@Override
public User authenticate(String username, String password) {
init();
if (pool == null) {
return null;
}
// Fetch and authenticate the user
Entry userEntry;
try {
EntryCursor cursor = pool.getConnection().search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
if (cursor.next()) {
userEntry = cursor.get();
pool.getConnection().bind(userEntry.getDn(), password);
} else {
// User not found
return null;
}
} catch (Exception e) {
log.error("Error authenticating \"" + username + "\" using the LDAP", e);
return null;
}
UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
if (user == null) {
// The user is valid but never authenticated, create the user now
log.info("\"" + username + "\" authenticated for the first time, creating the internal user");
user = new User();
user.setRoleId(Constants.DEFAULT_USER_ROLE);
user.setUsername(username);
user.setPassword(UUID.randomUUID().toString()); // No authentication using the internal database
Attribute mailAttribute = userEntry.get("mail");
if (mailAttribute == null || mailAttribute.get() == null) {
user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL));
} else {
Value<?> value = mailAttribute.get();
user.setEmail(value.getString());
}
user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));
try {
userDao.create(user, "admin");
} catch (Exception e) {
log.error("Error while creating the internal user", e);
return null;
}
}
return user;
}
}

View File

@ -6,6 +6,7 @@ import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting; import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.multipdf.PDFMergerUtility; import org.apache.pdfbox.multipdf.PDFMergerUtility;
import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.ImageType;
import org.apache.pdfbox.rendering.PDFRenderer; import org.apache.pdfbox.rendering.PDFRenderer;
import org.apache.pdfbox.text.PDFTextStripper; import org.apache.pdfbox.text.PDFTextStripper;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -60,7 +61,7 @@ public class PdfFormatHandler implements FormatHandler {
for (int pageIndex = 0; pageIndex < pdfDocument.getNumberOfPages(); pageIndex++) { for (int pageIndex = 0; pageIndex < pdfDocument.getNumberOfPages(); pageIndex++) {
log.info("OCR page " + (pageIndex + 1) + "/" + pdfDocument.getNumberOfPages() + " of PDF file containing only images"); log.info("OCR page " + (pageIndex + 1) + "/" + pdfDocument.getNumberOfPages() + " of PDF file containing only images");
sb.append(" "); sb.append(" ");
sb.append(FileUtil.ocrFile(language, renderer.renderImage(pageIndex))); sb.append(FileUtil.ocrFile(language, renderer.renderImageWithDPI(pageIndex, 300, ImageType.GRAY)));
} }
return sb.toString(); return sb.toString();
} catch (Exception e) { } catch (Exception e) {
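Rendering at a fixed 300 DPI in grayscale generally gives Tesseract cleaner input than the default 72 DPI render used before. A minimal standalone sketch of that rendering step, assuming PDFBox 2.x and a placeholder input file:

```java
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.ImageType;
import org.apache.pdfbox.rendering.PDFRenderer;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;

public class RenderForOcr {
    public static void main(String[] args) throws Exception {
        try (PDDocument pdf = PDDocument.load(new File("scan.pdf"))) {
            PDFRenderer renderer = new PDFRenderer(pdf);
            for (int pageIndex = 0; pageIndex < pdf.getNumberOfPages(); pageIndex++) {
                // 300 DPI grayscale, the same settings as PdfFormatHandler above
                BufferedImage image = renderer.renderImageWithDPI(pageIndex, 300, ImageType.GRAY);
                ImageIO.write(image, "png", new File("page-" + pageIndex + ".png"));
            }
        }
    }
}
```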


@ -36,12 +36,23 @@ public interface IndexingHandler {
void shutDown(); void shutDown();
/** /**
* Fully rebuild the index. * Clear the index.
*/
void clearIndex();
/**
* Index a list of documents.
* *
* @param documentList All documents * @param documentList All documents
*/
void createDocuments(List<Document> documentList);
/**
* Index a list of files.
*
* @param fileList All files * @param fileList All files
*/ */
void rebuildIndex(List<Document> documentList, List<File> fileList); void createFiles(List<File> fileList);
/** /**
* Index a new document. * Index a new document.


@ -9,6 +9,8 @@ import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.ConfigDao; import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.criteria.DocumentCriteria; import com.sismics.docs.core.dao.criteria.DocumentCriteria;
import com.sismics.docs.core.dao.dto.DocumentDto; import com.sismics.docs.core.dao.dto.DocumentDto;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Config; import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document; import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.File; import com.sismics.docs.core.model.jpa.File;
@ -25,8 +27,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*; import org.apache.lucene.index.*;
import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil; import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
import org.apache.lucene.search.highlight.Highlighter; import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer; import org.apache.lucene.search.highlight.QueryScorer;
@ -43,6 +44,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.*; import java.util.*;
@ -84,6 +86,25 @@ public class LuceneIndexingHandler implements IndexingHandler {
@Override @Override
public void startUp() throws Exception { public void startUp() throws Exception {
try {
initLucene();
} catch (Exception e) {
// An error occurred initializing Lucene, the index is out of date or broken, delete everything
log.info("Unable to initialize Lucene, cleaning up the index: " + e.getMessage());
Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
Files.walk(luceneDirectory)
.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(java.io.File::delete);
// Re-initialize and schedule a full reindex
initLucene();
RebuildIndexAsyncEvent rebuildIndexAsyncEvent = new RebuildIndexAsyncEvent();
AppContext.getInstance().getAsyncEventBus().post(rebuildIndexAsyncEvent);
}
}
private void initLucene() throws Exception {
ConfigDao configDao = new ConfigDao(); ConfigDao configDao = new ConfigDao();
Config luceneStorageConfig = configDao.getById(ConfigType.LUCENE_DIRECTORY_STORAGE); Config luceneStorageConfig = configDao.getById(ConfigType.LUCENE_DIRECTORY_STORAGE);
String luceneStorage = luceneStorageConfig == null ? null : luceneStorageConfig.getValue(); String luceneStorage = luceneStorageConfig == null ? null : luceneStorageConfig.getValue();
@ -101,7 +122,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
// Create an index writer // Create an index writer
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer()); IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
config.setCommitOnClose(true); config.setCommitOnClose(true);
config.setMergeScheduler(new SerialMergeScheduler()); config.setMergeScheduler(new ConcurrentMergeScheduler());
indexWriter = new IndexWriter(directory, config); indexWriter = new IndexWriter(directory, config);
// Check index version and rebuild it if necessary // Check index version and rebuild it if necessary
@ -142,18 +163,23 @@ public class LuceneIndexingHandler implements IndexingHandler {
} }
@Override @Override
public void rebuildIndex(final List<Document> documentList, final List<File> fileList) { public void clearIndex() {
handle(indexWriter -> { handle(IndexWriter::deleteAll);
// Empty index }
indexWriter.deleteAll();
// Add all documents @Override
public void createDocuments(List<Document> documentList) {
handle(indexWriter -> {
for (Document document : documentList) { for (Document document : documentList) {
org.apache.lucene.document.Document luceneDocument = getDocumentFromDocument(document); org.apache.lucene.document.Document luceneDocument = getDocumentFromDocument(document);
indexWriter.addDocument(luceneDocument); indexWriter.addDocument(luceneDocument);
} }
});
}
// Add all files @Override
public void createFiles(List<File> fileList) {
handle(indexWriter -> {
for (File file : fileList) { for (File file : fileList) {
org.apache.lucene.document.Document luceneDocument = getDocumentFromFile(file); org.apache.lucene.document.Document luceneDocument = getDocumentFromFile(file);
indexWriter.addDocument(luceneDocument); indexWriter.addDocument(luceneDocument);
@ -214,7 +240,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
List<String> criteriaList = new ArrayList<>(); List<String> criteriaList = new ArrayList<>();
Map<String, String> documentSearchMap = Maps.newHashMap(); Map<String, String> documentSearchMap = Maps.newHashMap();
StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, "); StringBuilder sb = new StringBuilder("select distinct d.DOC_ID_C c0, d.DOC_TITLE_C c1, d.DOC_DESCRIPTION_C c2, d.DOC_CREATEDATE_D c3, d.DOC_LANGUAGE_C c4, d.DOC_IDFILE_C, ");
sb.append(" s.count c5, "); sb.append(" s.count c5, ");
sb.append(" f.count c6, "); sb.append(" f.count c6, ");
sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 "); sb.append(" rs2.RTP_ID_C c7, rs2.RTP_NAME_C, d.DOC_UPDATEDATE_D c8 ");
@ -225,7 +251,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " + " s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
" left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " + " left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
" FROM T_FILE f " + " FROM T_FILE f " +
" WHERE f.FIL_DELETEDATE_D IS NULL group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C "); " WHERE f.FIL_DELETEDATE_D is null group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
sb.append(" left join (select rs.*, rs3.idDocument " + sb.append(" left join (select rs.*, rs3.idDocument " +
"from T_ROUTE_STEP rs " + "from T_ROUTE_STEP rs " +
"join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " + "join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
@ -251,7 +277,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_ID_C in :documentIdList"); criteriaList.add("d.DOC_ID_C in :documentIdList");
parameterMap.put("documentIdList", documentSearchMap.keySet()); parameterMap.put("documentIdList", documentSearchMap.keySet());
suggestSearchTerms(criteria.getSearch(), suggestionList); suggestSearchTerms(criteria.getFullSearch(), suggestionList);
} }
if (criteria.getCreateDateMin() != null) { if (criteria.getCreateDateMin() != null) {
criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin"); criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
@ -282,9 +308,27 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")"); criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
} }
} }
if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
for (String tagId : tagIdList) {
sb.append(String.format("left join T_DOCUMENT_TAG dtex%d on dtex%d.DOT_IDDOCUMENT_C = d.DOC_ID_C and dtex%d.DOT_IDTAG_C = :tagIdEx%d and dtex%d.DOT_DELETEDATE_D is null ", index, index, index, index, index));
parameterMap.put("tagIdEx" + index, tagId);
tagCriteriaList.add(String.format("dtex%d.DOT_ID_C is null", index));
index++;
}
criteriaList.add("(" + Joiner.on(" AND ").join(tagCriteriaList) + ")");
}
}
if (criteria.getShared() != null && criteria.getShared()) { if (criteria.getShared() != null && criteria.getShared()) {
criteriaList.add("s.count > 0"); criteriaList.add("s.count > 0");
} }
if (criteria.getMimeType() != null) {
sb.append("left join T_FILE f0 on f0.FIL_IDDOC_C = d.DOC_ID_C and f0.FIL_MIMETYPE_C = :mimeType and f0.FIL_DELETEDATE_D is null");
parameterMap.put("mimeType", criteria.getMimeType());
criteriaList.add("f0.FIL_ID_C is not null");
}
if (criteria.getLanguage() != null) { if (criteria.getLanguage() != null) {
criteriaList.add("d.DOC_LANGUAGE_C = :language"); criteriaList.add("d.DOC_LANGUAGE_C = :language");
parameterMap.put("language", criteria.getLanguage()); parameterMap.put("language", criteria.getLanguage());
@ -318,6 +362,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
documentDto.setDescription((String) o[i++]); documentDto.setDescription((String) o[i++]);
documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime()); documentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
documentDto.setLanguage((String) o[i++]); documentDto.setLanguage((String) o[i++]);
documentDto.setFileId((String) o[i++]);
Number shareCount = (Number) o[i++]; Number shareCount = (Number) o[i++];
documentDto.setShared(shareCount != null && shareCount.intValue() > 0); documentDto.setShared(shareCount != null && shareCount.intValue() > 0);
Number fileCount = (Number) o[i++]; Number fileCount = (Number) o[i++];
@ -349,7 +394,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
LuceneDictionary dictionary = new LuceneDictionary(directoryReader, "title"); LuceneDictionary dictionary = new LuceneDictionary(directoryReader, "title");
suggester.build(dictionary); suggester.build(dictionary);
int lastIndex = search.lastIndexOf(' '); int lastIndex = search.lastIndexOf(' ');
String suggestQuery = search.substring(lastIndex < 0 ? 0 : lastIndex); String suggestQuery = search.substring(Math.max(lastIndex, 0));
List<Lookup.LookupResult> lookupResultList = suggester.lookup(suggestQuery, false, 10); List<Lookup.LookupResult> lookupResultList = suggester.lookup(suggestQuery, false, 10);
for (Lookup.LookupResult lookupResult : lookupResultList) { for (Lookup.LookupResult lookupResult : lookupResultList) {
suggestionList.add(lookupResult.key.toString()); suggestionList.add(lookupResult.key.toString());
@ -365,29 +410,26 @@ public class LuceneIndexingHandler implements IndexingHandler {
* @throws Exception e * @throws Exception e
*/ */
private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception { private Map<String, String> search(String searchQuery, String fullSearchQuery) throws Exception {
// Escape query and add quotes so QueryParser generate a PhraseQuery // The fulltext query searches in all fields
String escapedSearchQuery = "\"" + QueryParserUtil.escape(searchQuery + " " + fullSearchQuery) + "\""; searchQuery = searchQuery + " " + fullSearchQuery;
String escapedFullSearchQuery = "\"" + QueryParserUtil.escape(fullSearchQuery) + "\"";
// Build search query // Build search query
Analyzer analyzer = new StandardAnalyzer(); Analyzer analyzer = new StandardAnalyzer();
StandardQueryParser qpHelper = new StandardQueryParser(analyzer);
qpHelper.setPhraseSlop(100); // PhraseQuery add terms
// Search on documents and files // Search on documents and files
BooleanQuery query = new BooleanQuery.Builder() BooleanQuery query = new BooleanQuery.Builder()
.add(qpHelper.parse(escapedSearchQuery, "title"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "title").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "description"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "description").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "subject"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "subject").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "identifier"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "identifier").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "publisher"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "publisher").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "format"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "format").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "source"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "source").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "type"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "type").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "coverage"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "coverage").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "rights"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "rights").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedSearchQuery, "filename"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "filename").parse(searchQuery), BooleanClause.Occur.SHOULD)
.add(qpHelper.parse(escapedFullSearchQuery, "content"), BooleanClause.Occur.SHOULD) .add(buildQueryParser(analyzer, "content").parse(fullSearchQuery), BooleanClause.Occur.SHOULD)
.build(); .build();
// Search // Search
@ -429,6 +471,19 @@ public class LuceneIndexingHandler implements IndexingHandler {
return documentMap; return documentMap;
} }
/**
* Build a query parser for searching.
*
* @param analyzer Analyzer
* @param field Field
* @return Query parser
*/
private SimpleQueryParser buildQueryParser(Analyzer analyzer, String field) {
SimpleQueryParser simpleQueryParser = new SimpleQueryParser(analyzer, field);
simpleQueryParser.setDefaultOperator(BooleanClause.Occur.MUST); // AND all the terms
return simpleQueryParser;
}
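With `SimpleQueryParser` and a `MUST` default operator, every term is required within a field while the fields themselves stay optional alternatives. A small standalone sketch of the same construction, with two placeholder fields taken from the list above:

```java
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.simple.SimpleQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;

public class SearchQueryDemo {
    public static void main(String[] args) {
        Analyzer analyzer = new StandardAnalyzer();
        String searchQuery = "quarterly report";

        BooleanQuery query = new BooleanQuery.Builder()
                .add(buildQueryParser(analyzer, "title").parse(searchQuery), BooleanClause.Occur.SHOULD)
                .add(buildQueryParser(analyzer, "content").parse(searchQuery), BooleanClause.Occur.SHOULD)
                .build();

        // Prints roughly: (+title:quarterly +title:report) (+content:quarterly +content:report)
        System.out.println(query);
    }

    private static SimpleQueryParser buildQueryParser(Analyzer analyzer, String field) {
        SimpleQueryParser parser = new SimpleQueryParser(analyzer, field);
        parser.setDefaultOperator(BooleanClause.Occur.MUST); // AND all the terms
        return parser;
    }
}
```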
/** /**
* Build Lucene document from database document. * Build Lucene document from database document.
* *


@ -36,7 +36,7 @@ public class PaginatedLists {
if (pageSize > MAX_PAGE_SIZE) { if (pageSize > MAX_PAGE_SIZE) {
pageSize = MAX_PAGE_SIZE; pageSize = MAX_PAGE_SIZE;
} }
return new PaginatedList<E>(pageSize, offset); return new PaginatedList<>(pageSize, offset);
} }
/** /**
@ -54,7 +54,7 @@ public class PaginatedLists {
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects * @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters * @param queryParam Query parameters
*/ */
public static <E> void executeCountQuery(PaginatedList<E> paginatedList, QueryParam queryParam) { private static <E> void executeCountQuery(PaginatedList<E> paginatedList, QueryParam queryParam) {
StringBuilder sb = new StringBuilder("select count(*) as result_count from ("); StringBuilder sb = new StringBuilder("select count(*) as result_count from (");
sb.append(queryParam.getQueryString()); sb.append(queryParam.getQueryString());
sb.append(") as t1"); sb.append(") as t1");
@ -70,7 +70,6 @@ public class PaginatedLists {
/** /**
* Executes a query and returns the data of the current page.
* *
* @param em EntityManager
* @param paginatedList Paginated list object containing parameters, and into which results are added by side effects * @param paginatedList Paginated list object containing parameters, and into which results are added by side effects
* @param queryParam Query parameters * @param queryParam Query parameters
* @return List of results * @return List of results


@ -124,11 +124,11 @@ public class EmailUtil {
// Application name // Application name
Config themeConfig = configDao.getById(ConfigType.THEME); Config themeConfig = configDao.getById(ConfigType.THEME);
String appName = "Sismics Docs"; String appName = "Teedy";
if (themeConfig != null) { if (themeConfig != null) {
try (JsonReader reader = Json.createReader(new StringReader(themeConfig.getValue()))) { try (JsonReader reader = Json.createReader(new StringReader(themeConfig.getValue()))) {
JsonObject themeJson = reader.readObject(); JsonObject themeJson = reader.readObject();
appName = themeJson.getString("name", "Sismics Docs"); appName = themeJson.getString("name", "Teedy");
} }
} }


@ -15,7 +15,7 @@ public class EnvironmentUtil {
private static String MAC_OS_USER_HOME = System.getProperty("user.home"); private static String MAC_OS_USER_HOME = System.getProperty("user.home");
private static String DOCS_HOME = System.getProperty("docs.home"); private static String TEEDY_HOME = System.getProperty("docs.home");
/** /**
* In a web application context. * In a web application context.
@ -90,8 +90,8 @@ public class EnvironmentUtil {
* *
* @return Home directory * @return Home directory
*/ */
public static String getDocsHome() { public static String getTeedyHome() {
return DOCS_HOME; return TEEDY_HOME;
} }
/** /**


@ -48,8 +48,11 @@ public class DialectUtil {
sql = sql.replaceAll("(cached|memory) table", "table"); sql = sql.replaceAll("(cached|memory) table", "table");
sql = sql.replaceAll("datetime", "timestamp"); sql = sql.replaceAll("datetime", "timestamp");
sql = sql.replaceAll("longvarchar", "text"); sql = sql.replaceAll("longvarchar", "text");
sql = sql.replaceAll("bit not null", "bool not null"); sql = sql.replaceAll("bit default 1", "bool default true");
sql = sql.replaceAll("bit default 0", "bool default false"); sql = sql.replaceAll("bit default 0", "bool default false");
sql = sql.replaceAll("bit not null default 1", "bool not null default true");
sql = sql.replaceAll("bit not null default 0", "bool not null default false");
sql = sql.replaceAll("bit not null", "bool not null");
return sql; return sql;
} }
} }
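The new replacement order matters: the more specific `bit ... default` patterns must be rewritten before the bare `bit not null`, otherwise an H2 column such as `bit not null default 1` would end up as `bool not null default 1`, which PostgreSQL rejects. A tiny standalone check of that ordering (the DDL line comes from the `USE_ONBOARDING_B` migration below):

```java
public class DialectTransformCheck {
    public static void main(String[] args) {
        String sql = "alter table T_USER add column USE_ONBOARDING_B bit not null default 1;";

        // Specific patterns first, generic "bit not null" last
        sql = sql.replaceAll("bit default 1", "bool default true");
        sql = sql.replaceAll("bit default 0", "bool default false");
        sql = sql.replaceAll("bit not null default 1", "bool not null default true");
        sql = sql.replaceAll("bit not null default 0", "bool not null default false");
        sql = sql.replaceAll("bit not null", "bool not null");

        // alter table T_USER add column USE_ONBOARDING_B bool not null default true;
        System.out.println(sql);
    }
}
```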


@ -88,7 +88,7 @@ public final class EMF {
if (databaseUrl == null) { if (databaseUrl == null) {
props.put("hibernate.connection.driver_class", "org.h2.Driver"); props.put("hibernate.connection.driver_class", "org.h2.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect"); props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536"); props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");
props.put("hibernate.connection.username", "sa"); props.put("hibernate.connection.username", "sa");
} else { } else {
props.put("hibernate.connection.driver_class", "org.postgresql.Driver"); props.put("hibernate.connection.driver_class", "org.postgresql.Driver");


@ -1 +1 @@
db.version=20 db.version=25


@ -7,5 +7,7 @@ alter table T_ROUTE_STEP add constraint FK_RTP_IDROUTE_C foreign key (RTP_IDROUT
alter table T_ROUTE_STEP add constraint FK_RTP_IDVALIDATORUSER_C foreign key (RTP_IDVALIDATORUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict; alter table T_ROUTE_STEP add constraint FK_RTP_IDVALIDATORUSER_C foreign key (RTP_IDVALIDATORUSER_C) references T_USER (USE_ID_C) on delete restrict on update restrict;
insert into T_ROUTE_MODEL (RTM_ID_C, RTM_NAME_C, RTM_STEPS_C, RTM_CREATEDATE_D) values ('default-document-review', 'Document review', '[{"type":"VALIDATE","target":{"name":"administrators","type":"GROUP"},"name":"Check the document''s metadata"},{"type":"VALIDATE","target":{"name":"administrators","type":"GROUP"},"name":"Add relevant files to the document"},{"type":"APPROVE","target":{"name":"administrators","type":"GROUP"},"name":"Approve the document"}]', now()); insert into T_ROUTE_MODEL (RTM_ID_C, RTM_NAME_C, RTM_STEPS_C, RTM_CREATEDATE_D) values ('default-document-review', 'Document review', '[{"type":"VALIDATE","target":{"name":"administrators","type":"GROUP"},"name":"Check the document''s metadata"},{"type":"VALIDATE","target":{"name":"administrators","type":"GROUP"},"name":"Add relevant files to the document"},{"type":"APPROVE","target":{"name":"administrators","type":"GROUP"},"name":"Approve the document"}]', now());
insert into T_ACL (ACL_ID_C, ACL_PERM_C, ACL_SOURCEID_C, ACL_TARGETID_C) values ('acl-admin-default-route-read', 'READ', 'default-document-review', 'administrators');
insert into T_ACL (ACL_ID_C, ACL_PERM_C, ACL_SOURCEID_C, ACL_TARGETID_C) values ('acl-admin-default-route-write', 'WRITE', 'default-document-review', 'administrators');
update T_CONFIG set CFG_VALUE_C = '15' where CFG_ID_C = 'DB_VERSION'; update T_CONFIG set CFG_VALUE_C = '15' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,4 @@
alter table T_DOCUMENT add column DOC_IDFILE_C varchar(36);
alter table T_DOCUMENT add constraint FK_DOC_IDFILE_C foreign key (DOC_IDFILE_C) references T_FILE (FIL_ID_C) on delete restrict on update restrict;
update T_CONFIG set CFG_VALUE_C = '21' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,5 @@
alter table T_FILE add column FIL_VERSION_N int not null default 0;
alter table T_FILE add column FIL_LATESTVERSION_B bit not null default 1;
alter table T_FILE add column FIL_IDVERSION_C varchar(36);
update T_CONFIG set CFG_VALUE_C = '22' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,2 @@
alter table T_USER add column USE_ONBOARDING_B bit not null default 1;
update T_CONFIG set CFG_VALUE_C = '23' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,5 @@
create cached table T_METADATA ( MET_ID_C varchar(36) not null, MET_NAME_C varchar(50) not null, MET_TYPE_C varchar(20) not null, MET_DELETEDATE_D datetime, primary key (MET_ID_C) );
create cached table T_DOCUMENT_METADATA ( DME_ID_C varchar(36) not null, DME_IDDOCUMENT_C varchar(36) not null, DME_IDMETADATA_C varchar(36) not null, DME_VALUE_C varchar(4000) null, primary key (DME_ID_C) );
alter table T_DOCUMENT_METADATA add constraint FK_DME_IDDOCUMENT_C foreign key (DME_IDDOCUMENT_C) references T_DOCUMENT (DOC_ID_C) on delete restrict on update restrict;
alter table T_DOCUMENT_METADATA add constraint FK_DME_IDMETADATA_C foreign key (DME_IDMETADATA_C) references T_METADATA (MET_ID_C) on delete restrict on update restrict;
update T_CONFIG set CFG_VALUE_C = '24' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,3 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_AUTOMATIC_TAGS', 'false');
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_DELETE_IMPORTED', 'false');
update T_CONFIG set CFG_VALUE_C = '25' where CFG_ID_C = 'DB_VERSION';


@ -0,0 +1,10 @@
email.template.password_recovery.subject=Bitte setzen Sie ihr Passwort zur\u00FCck
email.template.password_recovery.hello=Hallo {0}.
email.template.password_recovery.instruction1=Wir haben eine Anfrage zum Zur\u00FCcksetzen Ihres Passworts erhalten.<br/>Wenn Sie keine Hilfe angefordert haben, k\u00F6nnen Sie diese E-Mail einfach ignorieren.
email.template.password_recovery.instruction2=Um Ihr Passwort zur\u00FCckzusetzen, besuchen Sie bitte den folgenden Link:
email.template.password_recovery.click_here=Klicken Sie hier, um Ihr Passwort zur\u00FCckzusetzen
email.template.route_step_validate.subject=Ein Dokument braucht Ihre Aufmerksamkeit
email.template.route_step_validate.hello=Hallo {0}.
email.template.route_step_validate.instruction1=Ihnen wurde ein Workflow-Schritt zugewiesen, der Ihre Aufmerksamkeit erfordert.
email.template.route_step_validate.instruction2=Um das Dokument anzuzeigen und den Workflow zu \u00FCberpr\u00FCfen, besuchen Sie bitte den folgenden Link:
email.no_html.error=Ihr E-Mail-Client unterst\u00FCtzt keine HTML-Nachrichten


@ -0,0 +1,10 @@
email.template.password_recovery.subject=Bitte setzen Sie ihr Passwort zur\u00FCck
email.template.password_recovery.hello=Hallo {0}.
email.template.password_recovery.instruction1=Wir haben eine Anfrage zum Zur\u00FCcksetzen Ihres Passworts erhalten.<br/>Wenn Sie keine Hilfe angefordert haben, können Sie diese E-Mail einfach ignorieren.
email.template.password_recovery.instruction2=Um Ihr Passwort zur\u00FCckzusetzen, besuchen Sie bitte den folgenden Link:
email.template.password_recovery.click_here=Klicken Sie hier, um Ihr Passwort zur\u00FCckzusetzen
email.template.route_step_validate.subject=Ein Dokument braucht Ihre Aufmerksamkeit
email.template.route_step_validate.hello=Hallo {0}.
email.template.route_step_validate.instruction1=Ihnen wurde ein Workflow-Schritt zugewiesen, der Ihre Aufmerksamkeit erfordert.
email.template.route_step_validate.instruction2=Um das Dokument anzuzeigen und den Workflow zu \u00FCberpr\u00FCfen, besuchen Sie bitte den folgenden Link:
email.no_html.error=Ihr E-Mail-Client unterst\u00FCtzt keine HTML-Nachrichten


@ -0,0 +1,27 @@
package com.sismics.util.format;
import com.sismics.docs.core.util.format.PdfFormatHandler;
import org.junit.Assert;
import org.junit.Test;
import java.nio.file.Paths;
/**
* Test of {@link PdfFormatHandler}
*
* @author bgamard
*/
public class TestPdfFormatHandler {
/**
* Test related to https://github.com/sismics/docs/issues/373.
*/
@Test
public void testIssue373() throws Exception {
PdfFormatHandler formatHandler = new PdfFormatHandler();
String content = formatHandler.extractContent("deu", Paths.get(ClassLoader.getSystemResource("file/issue373.pdf").toURI()));
Assert.assertTrue(content.contains("Aufrechterhaltung"));
Assert.assertTrue(content.contains("Außentemperatur"));
Assert.assertTrue(content.contains("Grundumsatzmessungen"));
Assert.assertTrue(content.contains("ermitteln"));
}
}


@ -7,3 +7,4 @@ log4j.appender.MEMORY.size=1000
log4j.logger.com.sismics=INFO log4j.logger.com.sismics=INFO
log4j.logger.org.hibernate=ERROR log4j.logger.org.hibernate=ERROR
log4j.logger.org.apache.directory=ERROR

docs-importer/Dockerfile

@ -0,0 +1,16 @@
FROM node:14.2-alpine AS builder
WORKDIR /build
COPY main.js package-lock.json package.json ./
RUN npm install && npm install -g pkg
RUN pkg -t node14-alpine-x64 .
FROM alpine
ENV TEEDY_TAG= TEEDY_ADDTAGS=false TEEDY_LANG=eng TEEDY_URL='http://localhost:8080' TEEDY_USERNAME=username TEEDY_PASSWORD=password TEEDY_COPYFOLDER= TEEDY_FILEFILTER=
RUN apk add --no-cache \
libc6-compat \
libstdc++
ADD pref /root/.config/preferences/com.sismics.docs.importer.pref
ADD env.sh /
COPY --from=builder /build/teedy-importer ./
CMD ["/bin/ash","-c","/env.sh && /teedy-importer -d"]


@ -1,33 +1,51 @@
# File Importer

This tool can be used to do a single import of files or to periodically scan for files in an input folder.

## Downloads

Built binaries for Windows/Linux/MacOSX can be found at <https://github.com/sismics/docs/releases>

## Usage

```console
./docs-importer-macos (for MacOSX)
./docs-importer-linux (for Linux)
docs-importer-win.exe (for Windows)
```

A wizard will ask you for the import configuration and write it in `~/.config/preferences/com.sismics.docs.importer.pref`.
Words following a `#` in the filename are added as tags to the document, if a tag with the same name exists on the server.

For the next start, pass the `-d` argument to skip the wizard:

```console
./docs-importer-linux -d
```

## Daemon mode

The daemon mode scans the input directory every 30 seconds for new files. Once a file is found and imported, it is **deleted**. You can set a `copyFolder` to copy the file to before deletion.

## Docker

The Docker image needs a volume mounted from a previously generated preferences file at `/root/.config/preferences/com.sismics.docs.importer.pref`. The container starts the importer in daemon mode and looks for files in `/import`.

Example usage:

```
docker run --name teedy-import -d -v /path/to/preferencefile:/root/.config/preferences/com.sismics.docs.importer.pref -v /path/to/import/folder:/import sismics/docs-importer:latest
```

### Environment variables

Instead of mounting the preferences file, the options can also be set through the environment variables `TEEDY_TAG`, `TEEDY_ADDTAGS`, `TEEDY_LANG`, `TEEDY_COPYFOLDER`, `TEEDY_FILEFILTER`, `TEEDY_URL`, `TEEDY_USERNAME` and `TEEDY_PASSWORD`.
The latter three have to be set for the importer to work. `TEEDY_TAG` has to be set to the UUID of the tag, not its name (the UUID can be found by visiting `baseUrl/api/tag/list` in your browser).

Example usage:

```
docker run --name teedy-import -d -e TEEDY_TAG=2071fdf7-0e26-409d-b53d-f25823a5eb9e -e TEEDY_ADDTAGS=false -e TEEDY_LANG=eng -e TEEDY_URL='http://teedy.example.com:port' -e TEEDY_USERNAME=username -e TEEDY_PASSWORD=superSecretPassword -v /path/to/import/folder:/import sismics/docs-importer:latest
```

## Build from sources

```console
npm install
npm install -g pkg
```

docs-importer/env.sh

@ -0,0 +1,11 @@
#!/bin/ash
file=/root/.config/preferences/com.sismics.docs.importer.pref
sed -i "s/env1/$TEEDY_TAG/g" $file
sed -i "s/env2/$TEEDY_ADDTAGS/g" $file
sed -i "s/env3/$TEEDY_LANG/g" $file
sed -i "s,env4,$TEEDY_URL,g" $file
sed -i "s/env5/$TEEDY_USERNAME/g" $file
sed -i "s/env6/$TEEDY_PASSWORD/g" $file
sed -i "s,env7,$TEEDY_COPYFOLDER,g" $file
sed -i "s,env8,$TEEDY_FILEFILTER,g" $file
echo "Environment variables replaced"


@ -1,6 +1,7 @@
'use strict'; 'use strict';
const recursive = require('recursive-readdir'); const recursive = require('recursive-readdir');
const minimatch = require("minimatch");
const ora = require('ora'); const ora = require('ora');
const inquirer = require('inquirer'); const inquirer = require('inquirer');
const preferences = require('preferences'); const preferences = require('preferences');
@ -10,6 +11,7 @@ const _ = require('underscore');
const request = require('request').defaults({ const request = require('request').defaults({
jar: true jar: true
}); });
const qs = require('querystring');
// Load preferences // Load preferences
const prefs = new preferences('com.sismics.docs.importer',{ const prefs = new preferences('com.sismics.docs.importer',{
@ -22,9 +24,9 @@ const prefs = new preferences('com.sismics.docs.importer',{
}); });
// Welcome message // Welcome message
console.log('Sismics Docs Importer 1.0.0, https://www.sismicsdocs.com' + console.log('Teedy Importer 1.8, https://teedy.io' +
'\n\n' + '\n\n' +
'This program let you import files from your system to Sismics Docs' + 'This program let you import files from your system to Teedy' +
'\n'); '\n');
// Ask for the base URL // Ask for the base URL
@ -33,7 +35,7 @@ const askBaseUrl = () => {
{ {
type: 'input', type: 'input',
name: 'baseUrl', name: 'baseUrl',
message: 'What is the base URL of your Docs? (eg. https://docs.mycompany.com)', message: 'What is the base URL of your Teedy? (eg. https://teedy.mycompany.com)',
default: prefs.importer.baseUrl default: prefs.importer.baseUrl
} }
]).then(answers => { ]).then(answers => {
@ -42,12 +44,12 @@ const askBaseUrl = () => {
// Test base URL // Test base URL
const spinner = ora({ const spinner = ora({
text: 'Checking connection to Docs', text: 'Checking connection to Teedy',
spinner: 'flips' spinner: 'flips'
}).start(); }).start();
request(answers.baseUrl + '/api/app', function (error, response) { request(answers.baseUrl + '/api/app', function (error, response) {
if (!response || response.statusCode !== 200) { if (!response || response.statusCode !== 200) {
spinner.fail('Connection to Docs failed: ' + error); spinner.fail('Connection to Teedy failed: ' + error);
askBaseUrl(); askBaseUrl();
return; return;
} }
@ -82,7 +84,7 @@ const askCredentials = () => {
// Test credentials // Test credentials
const spinner = ora({ const spinner = ora({
text: 'Checking connection to Docs', text: 'Checking connection to Teedy',
spinner: 'flips' spinner: 'flips'
}).start(); }).start();
request.post({ request.post({
@ -141,11 +143,30 @@ const askPath = () => {
recursive(answers.path, function (error, files) { recursive(answers.path, function (error, files) {
spinner.succeed(files.length + ' files in this directory'); spinner.succeed(files.length + ' files in this directory');
askFileFilter();
});
});
});
});
};
// Ask for the file filter
const askFileFilter = () => {
console.log('');
inquirer.prompt([
{
type: 'input',
name: 'fileFilter',
message: 'What pattern do you want to use to match files? (eg. *.+(pdf|txt|jpg))',
default: prefs.importer.fileFilter || "*"
}
]).then(answers => {
// Save fileFilter
prefs.importer.fileFilter = answers.fileFilter;
askTag(); askTag();
}); });
});
});
});
}; };
// Ask for the tag to add // Ask for the tag to add
@ -176,7 +197,7 @@ const askTag = () => {
{ {
type: 'list', type: 'list',
name: 'tag', name: 'tag',
message: 'Which tag to add on imported documents?', message: 'Which tag to add to all imported documents?',
default: defaultTagName, default: defaultTagName,
choices: [ 'No tag' ].concat(_.pluck(tags, 'name')) choices: [ 'No tag' ].concat(_.pluck(tags, 'name'))
} }
@ -184,9 +205,107 @@ const askTag = () => {
// Save tag // Save tag
prefs.importer.tag = answers.tag === 'No tag' ? prefs.importer.tag = answers.tag === 'No tag' ?
'' : _.findWhere(tags, { name: answers.tag }).id; '' : _.findWhere(tags, { name: answers.tag }).id;
askAddTag();
});
});
};
const askAddTag = () => {
console.log('');
inquirer.prompt([
{
type: 'confirm',
name: 'addtags',
message: 'Do you want to add tags from the filename given with # ?',
default: prefs.importer.addtags === true
}
]).then(answers => {
// Save daemon
prefs.importer.addtags = answers.addtags;
// Save all preferences in case the program is sig-killed
askLang();
});
}
const askLang = () => {
console.log('');
// Load tags
const spinner = ora({
text: 'Loading default language',
spinner: 'flips'
}).start();
request.get({
url: prefs.importer.baseUrl + '/api/app',
}, function (error, response, body) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Connection to Teedy failed: ' + error);
askLang();
return;
}
spinner.succeed('Language loaded');
const defaultLang = prefs.importer.lang ? prefs.importer.lang : JSON.parse(body).default_language;
inquirer.prompt([
{
type: 'input',
name: 'lang',
message: 'Which should be the default language of the document?',
default: defaultLang
}
]).then(answers => {
// Save tag
prefs.importer.lang = answers.lang
askCopyFolder();
});
});
};
const askCopyFolder = () => {
console.log('');
inquirer.prompt([
{
type: 'input',
name: 'copyFolder',
message: 'Enter a path to copy files before they are deleted or leave empty to disable. The path must end with a \'/\' on MacOS and Linux or with a \'\\\' on Windows. Entering \'undefined\' will disable this again after setting the folder.',
default: prefs.importer.copyFolder
}
]).then(answers => {
// Save path
prefs.importer.copyFolder = answers.copyFolder=='undefined' ? '' : answers.copyFolder;
if (prefs.importer.copyFolder) {
// Test path
const spinner = ora({
text: 'Checking copy folder path',
spinner: 'flips'
}).start();
fs.lstat(answers.copyFolder, (error, stats) => {
if (error || !stats.isDirectory()) {
spinner.fail('Please enter a valid directory path');
askCopyFolder();
return;
}
fs.access(answers.copyFolder, fs.W_OK | fs.R_OK, (error) => {
if (error) {
spinner.fail('This directory is not writable');
askCopyFolder();
return;
}
spinner.succeed('Copy folder set!');
askDaemon(); askDaemon();
}); });
}); });
}
else {askDaemon();}
});
}; };
// Ask for daemon mode // Ask for daemon mode
@ -245,6 +364,8 @@ const start = () => {
// Import the files // Import the files
const importFiles = (remove, filesImported) => { const importFiles = (remove, filesImported) => {
recursive(prefs.importer.path, function (error, files) { recursive(prefs.importer.path, function (error, files) {
files = files.filter(minimatch.filter(prefs.importer.fileFilter ?? "*", {matchBase: true}));
if (files.length === 0) { if (files.length === 0) {
filesImported(); filesImported();
return; return;
@ -270,13 +391,64 @@ const importFile = (file, remove, resolve) => {
spinner: 'flips' spinner: 'flips'
}).start(); }).start();
request.put({ // Remove path of file
url: prefs.importer.baseUrl + '/api/document', let filename = file.replace(/^.*[\\\/]/, '');
form: {
title: file.replace(/^.*[\\\/]/, ''), // Get Tags given as hashtags from filename
language: 'eng', let taglist = filename.match(/#[^\s:#]+/mg);
taglist = taglist ? taglist.map(s => s.substr(1)) : [];
// Get available tags and UUIDs from server
request.get({
url: prefs.importer.baseUrl + '/api/tag/list',
}, function (error, response, body) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Error loading tags');
return;
}
let tagsarray = {};
for (let l of JSON.parse(body).tags) {
tagsarray[l.name] = l.id;
}
// Intersect tags from filename with existing tags on server
let foundtags = [];
for (let j of taglist) {
// If the tag is last in the filename it could include a file extension and would not be recognized
if (j.includes('.') && !tagsarray.hasOwnProperty(j) && !foundtags.includes(tagsarray[j])) {
while (j.includes('.') && !tagsarray.hasOwnProperty(j)) {
j = j.replace(/\.[^.]*$/,'');
}
}
if (tagsarray.hasOwnProperty(j) && !foundtags.includes(tagsarray[j])) {
foundtags.push(tagsarray[j]);
filename = filename.split('#'+j).join('');
}
}
if (prefs.importer.tag !== '' && !foundtags.includes(prefs.importer.tag)){
foundtags.push(prefs.importer.tag);
}
let data = {}
if (prefs.importer.addtags) {
data = {
title: prefs.importer.addtags ? filename : file.replace(/^.*[\\\/]/, '').substring(0, 100),
language: prefs.importer.lang || 'eng',
tags: foundtags
}
}
else {
data = {
title: prefs.importer.addtags ? filename : file.replace(/^.*[\\\/]/, '').substring(0, 100),
language: prefs.importer.lang || 'eng',
tags: prefs.importer.tag === '' ? undefined : prefs.importer.tag tags: prefs.importer.tag === '' ? undefined : prefs.importer.tag
} }
}
// Create document
request.put({
url: prefs.importer.baseUrl + '/api/document',
form: qs.stringify(data)
}, function (error, response, body) { }, function (error, response, body) {
if (error || !response || response.statusCode !== 200) { if (error || !response || response.statusCode !== 200) {
spinner.fail('Upload failed for ' + file + ': ' + error); spinner.fail('Upload failed for ' + file + ': ' + error);
@ -284,6 +456,7 @@ const importFile = (file, remove, resolve) => {
return; return;
} }
// Upload file
request.put({ request.put({
url: prefs.importer.baseUrl + '/api/file', url: prefs.importer.baseUrl + '/api/file',
formData: { formData: {
@ -298,11 +471,16 @@ const importFile = (file, remove, resolve) => {
} }
spinner.succeed('Upload successful for ' + file); spinner.succeed('Upload successful for ' + file);
if (remove) { if (remove) {
if (prefs.importer.copyFolder) {
fs.copyFileSync(file, prefs.importer.copyFolder + file.replace(/^.*[\\\/]/, ''));
fs.unlinkSync(file); fs.unlinkSync(file);
} }
else {fs.unlinkSync(file);}
}
resolve(); resolve();
}); });
}); });
});
}; };
// Entrypoint: daemon mode or wizard // Entrypoint: daemon mode or wizard
@ -312,7 +490,12 @@ if (argv.hasOwnProperty('d')) {
'Username: ' + prefs.importer.username + '\n' + 'Username: ' + prefs.importer.username + '\n' +
'Password: ***********\n' + 'Password: ***********\n' +
'Tag: ' + prefs.importer.tag + '\n' + 'Tag: ' + prefs.importer.tag + '\n' +
'Daemon mode: ' + prefs.importer.daemon); 'Add tags given #: ' + prefs.importer.addtags + '\n' +
'Language: ' + prefs.importer.lang + '\n' +
'Daemon mode: ' + prefs.importer.daemon + '\n' +
'Copy folder: ' + prefs.importer.copyFolder + '\n' +
'File filter: ' + prefs.importer.fileFilter
);
start(); start();
} else { } else {
askBaseUrl(); askBaseUrl();

docs-importer/package-lock.json

@ -1,6 +1,6 @@
{ {
"name": "docs-importer", "name": "teedy-importer",
"version": "1.5.1", "version": "1.8.0",
"lockfileVersion": 1, "lockfileVersion": 1,
"requires": true, "requires": true,
"dependencies": { "dependencies": {
@ -9,10 +9,10 @@
"resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
"integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=",
"requires": { "requires": {
"co": "4.6.0", "co": "^4.6.0",
"fast-deep-equal": "1.1.0", "fast-deep-equal": "^1.0.0",
"fast-json-stable-stringify": "2.0.0", "fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "0.3.1" "json-schema-traverse": "^0.3.0"
} }
}, },
"ansi-escapes": { "ansi-escapes": {
@ -30,7 +30,7 @@
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
"integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==", "integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
"requires": { "requires": {
"color-convert": "1.9.1" "color-convert": "^1.9.0"
} }
}, },
"argparse": { "argparse": {
@ -38,7 +38,7 @@
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
"requires": { "requires": {
"sprintf-js": "1.0.3" "sprintf-js": "~1.0.2"
} }
}, },
"asn1": { "asn1": {
@ -75,9 +75,8 @@
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
"integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
"optional": true,
"requires": { "requires": {
"tweetnacl": "0.14.5" "tweetnacl": "^0.14.3"
} }
}, },
"boom": { "boom": {
@ -85,7 +84,7 @@
"resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz",
"integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=",
"requires": { "requires": {
"hoek": "4.2.1" "hoek": "4.x.x"
} }
}, },
"brace-expansion": { "brace-expansion": {
@ -93,7 +92,7 @@
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": { "requires": {
"balanced-match": "1.0.0", "balanced-match": "^1.0.0",
"concat-map": "0.0.1" "concat-map": "0.0.1"
} }
}, },
@ -107,9 +106,9 @@
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.1.tgz", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.1.tgz",
"integrity": "sha512-QUU4ofkDoMIVO7hcx1iPTISs88wsO8jA92RQIm4JAwZvFGGAV2hSAA1NX7oVj2Ej2Q6NDTcRDjPTFrMCRZoJ6g==", "integrity": "sha512-QUU4ofkDoMIVO7hcx1iPTISs88wsO8jA92RQIm4JAwZvFGGAV2hSAA1NX7oVj2Ej2Q6NDTcRDjPTFrMCRZoJ6g==",
"requires": { "requires": {
"ansi-styles": "3.2.0", "ansi-styles": "^3.2.0",
"escape-string-regexp": "1.0.5", "escape-string-regexp": "^1.0.5",
"supports-color": "5.2.0" "supports-color": "^5.2.0"
} }
}, },
"chardet": { "chardet": {
@ -122,7 +121,7 @@
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz",
"integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=",
"requires": { "requires": {
"restore-cursor": "2.0.0" "restore-cursor": "^2.0.0"
} }
}, },
"cli-spinners": { "cli-spinners": {
@ -150,7 +149,7 @@
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz",
"integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==", "integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==",
"requires": { "requires": {
"color-name": "1.1.3" "color-name": "^1.1.1"
} }
}, },
"color-name": { "color-name": {
@ -163,7 +162,7 @@
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz",
"integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=",
"requires": { "requires": {
"delayed-stream": "1.0.0" "delayed-stream": "~1.0.0"
} }
}, },
"concat-map": { "concat-map": {
@ -181,7 +180,7 @@
"resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz",
"integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=",
"requires": { "requires": {
"boom": "5.2.0" "boom": "5.x.x"
}, },
"dependencies": { "dependencies": {
"boom": { "boom": {
@ -189,7 +188,7 @@
"resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz",
"integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==",
"requires": { "requires": {
"hoek": "4.2.1" "hoek": "4.x.x"
} }
} }
} }
@ -199,7 +198,7 @@
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
"requires": { "requires": {
"assert-plus": "1.0.0" "assert-plus": "^1.0.0"
} }
}, },
"defaults": { "defaults": {
@ -207,7 +206,7 @@
"resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz",
"integrity": "sha1-xlYFHpgX2f8I7YgUd/P+QBnz730=", "integrity": "sha1-xlYFHpgX2f8I7YgUd/P+QBnz730=",
"requires": { "requires": {
"clone": "1.0.3" "clone": "^1.0.2"
} }
}, },
"delayed-stream": { "delayed-stream": {
@ -219,9 +218,8 @@
"version": "0.1.1", "version": "0.1.1",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
"integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
"optional": true,
"requires": { "requires": {
"jsbn": "0.1.1" "jsbn": "~0.1.0"
} }
}, },
"escape-string-regexp": { "escape-string-regexp": {
@ -244,9 +242,9 @@
"resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.1.0.tgz", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.1.0.tgz",
"integrity": "sha512-E44iT5QVOUJBKij4IIV3uvxuNlbKS38Tw1HiupxEIHPv9qtC2PrDYohbXV5U+1jnfIXttny8gUhj+oZvflFlzA==", "integrity": "sha512-E44iT5QVOUJBKij4IIV3uvxuNlbKS38Tw1HiupxEIHPv9qtC2PrDYohbXV5U+1jnfIXttny8gUhj+oZvflFlzA==",
"requires": { "requires": {
"chardet": "0.4.2", "chardet": "^0.4.0",
"iconv-lite": "0.4.19", "iconv-lite": "^0.4.17",
"tmp": "0.0.33" "tmp": "^0.0.33"
} }
}, },
"extsprintf": { "extsprintf": {
@ -269,7 +267,7 @@
"resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz",
"integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=",
"requires": { "requires": {
"escape-string-regexp": "1.0.5" "escape-string-regexp": "^1.0.5"
} }
}, },
"forever-agent": { "forever-agent": {
@ -282,9 +280,9 @@
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz",
"integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=",
"requires": { "requires": {
"asynckit": "0.4.0", "asynckit": "^0.4.0",
"combined-stream": "1.0.6", "combined-stream": "1.0.6",
"mime-types": "2.1.18" "mime-types": "^2.1.12"
} }
}, },
"getpass": { "getpass": {
@ -292,7 +290,7 @@
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
"requires": { "requires": {
"assert-plus": "1.0.0" "assert-plus": "^1.0.0"
} }
}, },
"graceful-fs": { "graceful-fs": {
@ -310,8 +308,8 @@
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz",
"integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=",
"requires": { "requires": {
"ajv": "5.5.2", "ajv": "^5.1.0",
"har-schema": "2.0.0" "har-schema": "^2.0.0"
} }
}, },
"has-flag": { "has-flag": {
@ -324,10 +322,10 @@
"resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz",
"integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==",
"requires": { "requires": {
"boom": "4.3.1", "boom": "4.x.x",
"cryptiles": "3.1.2", "cryptiles": "3.x.x",
"hoek": "4.2.1", "hoek": "4.x.x",
"sntp": "2.1.0" "sntp": "2.x.x"
} }
}, },
"hoek": { "hoek": {
@ -340,9 +338,9 @@
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
"requires": { "requires": {
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"jsprim": "1.4.1", "jsprim": "^1.2.2",
"sshpk": "1.13.1" "sshpk": "^1.7.0"
} }
}, },
"iconv-lite": { "iconv-lite": {
@ -360,19 +358,19 @@
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-5.1.0.tgz", "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-5.1.0.tgz",
"integrity": "sha512-kn7N70US1MSZHZHSGJLiZ7iCwwncc7b0gc68YtlX29OjI3Mp0tSVV+snVXpZ1G+ONS3Ac9zd1m6hve2ibLDYfA==", "integrity": "sha512-kn7N70US1MSZHZHSGJLiZ7iCwwncc7b0gc68YtlX29OjI3Mp0tSVV+snVXpZ1G+ONS3Ac9zd1m6hve2ibLDYfA==",
"requires": { "requires": {
"ansi-escapes": "3.0.0", "ansi-escapes": "^3.0.0",
"chalk": "2.3.1", "chalk": "^2.0.0",
"cli-cursor": "2.1.0", "cli-cursor": "^2.1.0",
"cli-width": "2.2.0", "cli-width": "^2.0.0",
"external-editor": "2.1.0", "external-editor": "^2.1.0",
"figures": "2.0.0", "figures": "^2.0.0",
"lodash": "4.17.5", "lodash": "^4.3.0",
"mute-stream": "0.0.7", "mute-stream": "0.0.7",
"run-async": "2.3.0", "run-async": "^2.2.0",
"rxjs": "5.5.6", "rxjs": "^5.5.2",
"string-width": "2.1.1", "string-width": "^2.1.0",
"strip-ansi": "4.0.0", "strip-ansi": "^4.0.0",
"through": "2.3.8" "through": "^2.3.6"
} }
}, },
"is-fullwidth-code-point": { "is-fullwidth-code-point": {
@ -396,19 +394,18 @@
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
}, },
"js-yaml": { "js-yaml": {
"version": "3.10.0", "version": "3.13.1",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
"integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==", "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
"requires": { "requires": {
"argparse": "1.0.10", "argparse": "^1.0.7",
"esprima": "4.0.0" "esprima": "^4.0.0"
} }
}, },
"jsbn": { "jsbn": {
"version": "0.1.1", "version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
"optional": true
}, },
"json-schema": { "json-schema": {
"version": "0.2.3", "version": "0.2.3",
@ -437,16 +434,16 @@
} }
}, },
"lodash": { "lodash": {
"version": "4.17.5", "version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw==" "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
}, },
"log-symbols": { "log-symbols": {
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz",
"integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==",
"requires": { "requires": {
"chalk": "2.3.1" "chalk": "^2.0.1"
} }
}, },
"mime-db": { "mime-db": {
@ -459,7 +456,7 @@
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz",
"integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==",
"requires": { "requires": {
"mime-db": "1.33.0" "mime-db": "~1.33.0"
} }
}, },
"mimic-fn": { "mimic-fn": {
@ -472,27 +469,20 @@
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": { "requires": {
"brace-expansion": "1.1.11" "brace-expansion": "^1.1.7"
} }
}, },
"minimist": { "minimist": {
"version": "1.2.0", "version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
}, },
"mkdirp": { "mkdirp": {
"version": "0.5.1", "version": "0.5.5",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
"requires": { "requires": {
"minimist": "0.0.8" "minimist": "^1.2.5"
},
"dependencies": {
"minimist": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
}
} }
}, },
"mute-stream": { "mute-stream": {
@ -510,7 +500,7 @@
"resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz",
"integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=",
"requires": { "requires": {
"mimic-fn": "1.2.0" "mimic-fn": "^1.0.0"
} }
}, },
"ora": { "ora": {
@ -518,12 +508,12 @@
"resolved": "https://registry.npmjs.org/ora/-/ora-2.0.0.tgz", "resolved": "https://registry.npmjs.org/ora/-/ora-2.0.0.tgz",
"integrity": "sha512-g+IR0nMUXq1k4nE3gkENbN4wkF0XsVZFyxznTF6CdmwQ9qeTGONGpSR9LM5//1l0TVvJoJF3MkMtJp6slUsWFg==", "integrity": "sha512-g+IR0nMUXq1k4nE3gkENbN4wkF0XsVZFyxznTF6CdmwQ9qeTGONGpSR9LM5//1l0TVvJoJF3MkMtJp6slUsWFg==",
"requires": { "requires": {
"chalk": "2.3.1", "chalk": "^2.3.1",
"cli-cursor": "2.1.0", "cli-cursor": "^2.1.0",
"cli-spinners": "1.1.0", "cli-spinners": "^1.1.0",
"log-symbols": "2.2.0", "log-symbols": "^2.2.0",
"strip-ansi": "4.0.0", "strip-ansi": "^4.0.0",
"wcwidth": "1.0.1" "wcwidth": "^1.0.1"
} }
}, },
"os-homedir": { "os-homedir": {
@ -546,11 +536,11 @@
"resolved": "https://registry.npmjs.org/preferences/-/preferences-1.0.2.tgz", "resolved": "https://registry.npmjs.org/preferences/-/preferences-1.0.2.tgz",
"integrity": "sha512-cRjA8Galk1HDDBOKjx6DhTwfy5+FVZtH7ogg6rgTLX8Ak4wi55RaS4uRztJuVPd+md1jZo99bH/h1Q9bQQK8bg==", "integrity": "sha512-cRjA8Galk1HDDBOKjx6DhTwfy5+FVZtH7ogg6rgTLX8Ak4wi55RaS4uRztJuVPd+md1jZo99bH/h1Q9bQQK8bg==",
"requires": { "requires": {
"graceful-fs": "4.1.11", "graceful-fs": "^4.1.2",
"js-yaml": "3.10.0", "js-yaml": "^3.10.0",
"mkdirp": "0.5.1", "mkdirp": "^0.5.1",
"os-homedir": "1.0.2", "os-homedir": "^1.0.1",
"write-file-atomic": "1.3.4" "write-file-atomic": "^1.1.3"
} }
}, },
"punycode": { "punycode": {
@ -559,9 +549,9 @@
"integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
}, },
"qs": { "qs": {
"version": "6.5.1", "version": "6.9.4",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.4.tgz",
"integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" "integrity": "sha512-A1kFqHekCTM7cz0udomYUoYNWjBebHm/5wzU/XqrBRBNWectVH0QIiN+NEcZ0Dte5hvzHwbr8+XQmguPhJ6WdQ=="
}, },
"recursive-readdir": { "recursive-readdir": {
"version": "2.2.2", "version": "2.2.2",
@ -576,28 +566,35 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz",
"integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==", "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==",
"requires": { "requires": {
"aws-sign2": "0.7.0", "aws-sign2": "~0.7.0",
"aws4": "1.6.0", "aws4": "^1.6.0",
"caseless": "0.12.0", "caseless": "~0.12.0",
"combined-stream": "1.0.6", "combined-stream": "~1.0.5",
"extend": "3.0.1", "extend": "~3.0.1",
"forever-agent": "0.6.1", "forever-agent": "~0.6.1",
"form-data": "2.3.2", "form-data": "~2.3.1",
"har-validator": "5.0.3", "har-validator": "~5.0.3",
"hawk": "6.0.2", "hawk": "~6.0.2",
"http-signature": "1.2.0", "http-signature": "~1.2.0",
"is-typedarray": "1.0.0", "is-typedarray": "~1.0.0",
"isstream": "0.1.2", "isstream": "~0.1.2",
"json-stringify-safe": "5.0.1", "json-stringify-safe": "~5.0.1",
"mime-types": "2.1.18", "mime-types": "~2.1.17",
"oauth-sign": "0.8.2", "oauth-sign": "~0.8.2",
"performance-now": "2.1.0", "performance-now": "^2.1.0",
"qs": "6.5.1", "qs": "~6.5.1",
"safe-buffer": "5.1.1", "safe-buffer": "^5.1.1",
"stringstream": "0.0.5", "stringstream": "~0.0.5",
"tough-cookie": "2.3.4", "tough-cookie": "~2.3.3",
"tunnel-agent": "0.6.0", "tunnel-agent": "^0.6.0",
"uuid": "3.2.1" "uuid": "^3.1.0"
},
"dependencies": {
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
}
} }
}, },
"restore-cursor": { "restore-cursor": {
@ -605,8 +602,8 @@
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
"integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=",
"requires": { "requires": {
"onetime": "2.0.1", "onetime": "^2.0.0",
"signal-exit": "3.0.2" "signal-exit": "^3.0.2"
} }
}, },
"run-async": { "run-async": {
@ -614,7 +611,7 @@
"resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
"integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
"requires": { "requires": {
"is-promise": "2.1.0" "is-promise": "^2.1.0"
} }
}, },
"rxjs": { "rxjs": {
@ -630,6 +627,11 @@
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
}, },
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"signal-exit": { "signal-exit": {
"version": "3.0.2", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
@ -645,7 +647,7 @@
"resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz",
"integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==",
"requires": { "requires": {
"hoek": "4.2.1" "hoek": "4.x.x"
} }
}, },
"sprintf-js": { "sprintf-js": {
@ -654,18 +656,19 @@
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
}, },
"sshpk": { "sshpk": {
"version": "1.13.1", "version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
"requires": { "requires": {
"asn1": "0.2.3", "asn1": "~0.2.3",
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"bcrypt-pbkdf": "1.0.1", "bcrypt-pbkdf": "^1.0.0",
"dashdash": "1.14.1", "dashdash": "^1.12.0",
"ecc-jsbn": "0.1.1", "ecc-jsbn": "~0.1.1",
"getpass": "0.1.7", "getpass": "^0.1.1",
"jsbn": "0.1.1", "jsbn": "~0.1.0",
"tweetnacl": "0.14.5" "safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
} }
}, },
"string-width": { "string-width": {
@ -673,8 +676,8 @@
"resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
"requires": { "requires": {
"is-fullwidth-code-point": "2.0.0", "is-fullwidth-code-point": "^2.0.0",
"strip-ansi": "4.0.0" "strip-ansi": "^4.0.0"
} }
}, },
"stringstream": { "stringstream": {
@ -687,7 +690,7 @@
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
"integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
"requires": { "requires": {
"ansi-regex": "3.0.0" "ansi-regex": "^3.0.0"
} }
}, },
"supports-color": { "supports-color": {
@ -695,7 +698,7 @@
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.2.0.tgz", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.2.0.tgz",
"integrity": "sha512-F39vS48la4YvTZUPVeTqsjsFNrvcMwrV3RLZINsmHo+7djCvuUzSIeXOnZ5hmjef4bajL1dNccN+tg5XAliO5Q==", "integrity": "sha512-F39vS48la4YvTZUPVeTqsjsFNrvcMwrV3RLZINsmHo+7djCvuUzSIeXOnZ5hmjef4bajL1dNccN+tg5XAliO5Q==",
"requires": { "requires": {
"has-flag": "3.0.0" "has-flag": "^3.0.0"
} }
}, },
"symbol-observable": { "symbol-observable": {
@ -713,7 +716,7 @@
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"requires": { "requires": {
"os-tmpdir": "1.0.2" "os-tmpdir": "~1.0.2"
} }
}, },
"tough-cookie": { "tough-cookie": {
@ -721,7 +724,7 @@
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz",
"integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==", "integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==",
"requires": { "requires": {
"punycode": "1.4.1" "punycode": "^1.4.1"
} }
}, },
"tunnel-agent": { "tunnel-agent": {
@ -729,14 +732,13 @@
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
"requires": { "requires": {
"safe-buffer": "5.1.1" "safe-buffer": "^5.0.1"
} }
}, },
"tweetnacl": { "tweetnacl": {
"version": "0.14.5", "version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
"optional": true
}, },
"underscore": { "underscore": {
"version": "1.8.3", "version": "1.8.3",
@ -753,9 +755,9 @@
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
"requires": { "requires": {
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"core-util-is": "1.0.2", "core-util-is": "1.0.2",
"extsprintf": "1.3.0" "extsprintf": "^1.2.0"
} }
}, },
"wcwidth": { "wcwidth": {
@ -763,7 +765,7 @@
"resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz",
"integrity": "sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g=", "integrity": "sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g=",
"requires": { "requires": {
"defaults": "1.0.3" "defaults": "^1.0.3"
} }
}, },
"write-file-atomic": { "write-file-atomic": {
@ -771,9 +773,9 @@
"resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-1.3.4.tgz", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-1.3.4.tgz",
"integrity": "sha1-+Aek8LHZ6ROuekgRLmzDrxmRtF8=", "integrity": "sha1-+Aek8LHZ6ROuekgRLmzDrxmRtF8=",
"requires": { "requires": {
"graceful-fs": "4.1.11", "graceful-fs": "^4.1.11",
"imurmurhash": "0.1.4", "imurmurhash": "^0.1.4",
"slide": "1.1.6" "slide": "^1.1.5"
} }
} }
} }

Some files were not shown because too many files have changed in this diff.