Skip to content
Snippets Groups Projects
Commit 419247d0 authored by Adam Laskowski's avatar Adam Laskowski
Browse files

SSDM-13329 Removed rtd_phosphonetx project

parent 5e3ab51a
No related branches found
No related tags found
1 merge request!40SSDM-13578 : 2PT : Database and V3 Implementation - include the new AFS "free"...
Showing
with 128 additions and 854 deletions
# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1
data-store-server-code=DSS1
# host name of the machine on which the datastore server is running
host-address = http://localhost
host-address=http://localhost
# parent directory of the store directory and all the dropboxes
root-dir = data
root-dir=data
# The root directory of the data store
storeroot-dir = ${root-dir}/store
incoming-root-dir = ${root-dir}
storeroot-dir=${root-dir}/store
incoming-root-dir=${root-dir}
# Cache for data set files from other Data Store Servers
cache-workspace-folder ${root-dir}/dss-cache
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir=
# Port
port = 20001
use-ssl = false
port=20001
use-ssl=false
# Session timeout in minutes
session-timeout = 720
session-timeout=720
# Path to the keystore
keystore.path = etc/openBIS.keystore
keystore.path=etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
keystore.password=changeit
# Key password of the keystore
keystore.key-password = changeit
keystore.key-password=changeit
# The check interval (in seconds)
check-interval = 5
quiet-period = 10
data-set-locking-timeout = 1
check-interval=5
quiet-period=10
data-set-locking-timeout=1
# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to turn this value to something
# lower than 180.
shutdown-timeout = 180
shutdown-timeout=180
# The minimum time (in seconds) of availability of the data stream since moment when user requested
# for the data stream url. If not specified default value (20 seconds) will be used.
# minimum-time-to-keep-streams-in-sec = 20
#�If free disk space goes below value defined here, a notification email will be sent.
# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
# specified or if value is negative, the system will not be watching. There are 2 different kinds
# of highwater mark supported: the one that is checking the space on the store, and one that is
# checking the amount of free space for recovery state (on the local filesystem).
highwater-mark = -1
recovery-highwater-mark = -1
# If a data set is successfully registered it sends out an email to the registrator.
highwater-mark=-1
recovery-highwater-mark=-1
# If a data set is successfully registered it sends out an email to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the mails which are sent, when the data set could not be registered.
notify-successful-registration = false
notify-successful-registration=false
# The URL of the openBIS server
server-url = ${host-address}:20000
server-url=${host-address}:20000
# The username to use when contacting the openBIS server
username = etlserver
username=etlserver
# The password for the etlserver user who contacts the openBIS server
password=e1AWO4f0ro
# The base URL for Web client access to the data store server.
download-url = ${host-address}:${port}
download-url=${host-address}:${port}
# SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails in the file system in the root directory
mail.smtp.host = file://${root-dir}/email
mail.smtp.host=file://${root-dir}/email
# mail.smtp.host = localhost
# mail.from = openbis-dss@localhost
# mail.smtp.user =
# mail.smtp.password =
# Data sources
data-sources = path-info-db
data-sources=path-info-db
# Data source for pathinfo database
path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode = postgresql
path-info-db.basicDatabaseName = pathinfo
path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode=postgresql
path-info-db.basicDatabaseName=pathinfo
# The host and optionally port. Default is 'localhost'.
# path-info-db.urlHostPart =
path-info-db.databaseKind = test_js_common
path-info-db.scriptFolder = ../../../../datastore_server/source/sql
path-info-db.databaseKind=test_js_common
path-info-db.scriptFolder=../../../../datastore_server/source/sql
# ---------------------------------------------------------------------------
# maintenance plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of maintenance plugins. Each plugin should have configuration properties prefixed with its name.
maintenance-plugins = post-registration, path-info-deletion
# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database
post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval = 30
post-registration.cleanup-tasks-folder = ${root-dir}/post-registration/cleanup-tasks
post-registration.last-seen-data-set-file = ${root-dir}/post-registration/last-seen-data-set.txt
post-registration.post-registration-tasks = pathinfo-feeding, notifying
post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum = true
maintenance-plugins=post-registration, path-info-deletion
# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database
post-registration.class=ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval=30
post-registration.cleanup-tasks-folder=${root-dir}/post-registration/cleanup-tasks
post-registration.last-seen-data-set-file=${root-dir}/post-registration/last-seen-data-set.txt
post-registration.post-registration-tasks=pathinfo-feeding, notifying
post-registration.pathinfo-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum=true
# The NotifyingTask throws an exception. Thus, post-registration will always fail.
# This is needed to have an example of a data set which isn't post-registered.
post-registration.notifying.class = ch.systemsx.cisd.etlserver.postregistration.NotifyingTask
post-registration.notifying.destination-path-template = targets/${unknown-placeholder-preventing-post-registration}
post-registration.notifying.message-template = hello
post-registration.notifying.class=ch.systemsx.cisd.etlserver.postregistration.NotifyingTask
post-registration.notifying.destination-path-template=targets/${unknown-placeholder-preventing-post-registration}
post-registration.notifying.message-template=hello
# Maintenance task for deleting entries from pathinfo database after deletion of a data set
path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval = 120
path-info-deletion.data-source = path-info-db
path-info-deletion.data-set-table-name = data_sets
path-info-deletion.data-set-perm-id = CODE
path-info-deletion.class=ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval=120
path-info-deletion.data-source=path-info-db
path-info-deletion.data-set-table-name=data_sets
path-info-deletion.data-set-perm-id=CODE
# ---------------------------------------------------------------------------
# processing plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
processing-plugins = path-info-db-consistency-check
processing-plugins=path-info-db-consistency-check
# Processing task that checks the consistency between the data store and the meta information stored in the PathInfoDB.
# It sends out an email which contains all differences found.
path-info-db-consistency-check.label = Path Info DB consistency check
path-info-db-consistency-check.dataset-types = .*
path-info-db-consistency-check.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin
path-info-db-consistency-check.label=Path Info DB consistency check
path-info-db-consistency-check.dataset-types=.*
path-info-db-consistency-check.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin
# ---------------------------------------------------------------------------
# dropbox configurations
# ---------------------------------------------------------------------------
inputs = default-dropbox
dss-rpc.put-default = default-dropbox
default-dropbox.incoming-dir = ${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition = auto-detection
default-dropbox.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class = ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
inputs=default-dropbox
dss-rpc.put-default=default-dropbox
default-dropbox.incoming-dir=${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition=auto-detection
default-dropbox.top-level-data-set-handler=ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class=ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
#default-dropbox.validation-script-path = ../core-plugins/default/default-validation-script.py
# ---------------------------------------------------------------------------
# Archiver configuration (optional)
# ---------------------------------------------------------------------------
# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
# Archiver class specification (together with the list of packages this class belongs to).
archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
archiver.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
# ---------------------------------------------------------------------------
# Imaging database for screening (optional, only used if screening technology switched on)
# ---------------------------------------------------------------------------
# Format: <host>[:<port>]. Default: localhost
# imaging-database.url-host-part =
# Default: prod.
imaging-database.kind = test_js_common
proteomics-database-kind = test_js_common
screening-sql-root-folder = ../../../../screening/source/
proteomics-sql-root-folder = ../../../../rtd_phosphonetx/source/
imaging-database.kind=test_js_common
proteomics-database-kind=test_js_common
screening-sql-root-folder=../../../../screening/source/
# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION,
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------
jython-version=2.7
# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS2
data-store-server-code=DSS2
# host name of the machine on which the datastore server is running
host-address = http://localhost
host-address=http://localhost
# parent directory of the store directory and all the dropboxes
root-dir = data
root-dir=data
# The root directory of the data store
storeroot-dir = ${root-dir}/store
incoming-root-dir = ${root-dir}
storeroot-dir=${root-dir}/store
incoming-root-dir=${root-dir}
# Cache for data set files from other Data Store Servers
cache-workspace-folder = ${root-dir}/dss-cache
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =
cache-workspace-folder=${root-dir}/dss-cache
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir=
# Port
port = 20002
use-ssl = false
port=20002
use-ssl=false
# Session timeout in minutes
session-timeout = 720
session-timeout=720
# Path to the keystore
keystore.path = etc/openBIS.keystore
keystore.path=etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
keystore.password=changeit
# Key password of the keystore
keystore.key-password = changeit
keystore.key-password=changeit
# The check interval (in seconds)
check-interval = 5
quiet-period = 10
data-set-locking-timeout = 1
check-interval=5
quiet-period=10
data-set-locking-timeout=1
# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to turn this value to something
# lower than 180.
shutdown-timeout = 180
shutdown-timeout=180
# The minimum time (in seconds) of availability of the data stream since moment when user requested
# for the data stream url. If not specified default value (20 seconds) will be used.
# minimum-time-to-keep-streams-in-sec = 20
#�If free disk space goes below value defined here, a notification email will be sent.
# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
# specified or if value is negative, the system will not be watching. There are 2 different kinds
# of highwater mark supported: the one that is checking the space on the store, and one that is
# checking the amount of free space for recovery state (on the local filesystem).
highwater-mark = -1
recovery-highwater-mark = -1
# If a data set is successfully registered it sends out an email to the registrator.
highwater-mark=-1
recovery-highwater-mark=-1
# If a data set is successfully registered it sends out an email to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the mails which are sent, when the data set could not be registered.
notify-successful-registration = false
notify-successful-registration=false
# The URL of the openBIS server
server-url = ${host-address}:20000
server-url=${host-address}:20000
# The username to use when contacting the openBIS server
username = etlserver
username=etlserver
# The password for the etlserver user who contacts the openBIS server
password=e1AWO4f0ro
# The base URL for Web client access to the data store server.
download-url = ${host-address}:${port}
download-url=${host-address}:${port}
# SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails in the file system in the root directory
mail.smtp.host = file://${root-dir}/email
mail.smtp.host=file://${root-dir}/email
# mail.smtp.host = localhost
# mail.from = openbis-dss@localhost
# mail.smtp.user =
# mail.smtp.password =
# Data sources
data-sources = path-info-db
data-sources=path-info-db
# Data source for pathinfo database
path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode = postgresql
path-info-db.basicDatabaseName = pathinfo
path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode=postgresql
path-info-db.basicDatabaseName=pathinfo
# The host and optionally port. Default is 'localhost'.
# path-info-db.urlHostPart =
path-info-db.databaseKind = test_js_common2
path-info-db.scriptFolder = ../../../../datastore_server/source/sql
path-info-db.databaseKind=test_js_common2
path-info-db.scriptFolder=../../../../datastore_server/source/sql
# ---------------------------------------------------------------------------
# maintenance plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of maintenance plugins. Each plugin should have configuration properties prefixed with its name.
maintenance-plugins = post-registration, path-info-deletion
# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database
post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval = 30
post-registration.cleanup-tasks-folder = ${root-dir}/post-registration/cleanup-tasks
post-registration.last-seen-data-set-file = ${root-dir}/post-registration/last-seen-data-set.txt
post-registration.post-registration-tasks = pathinfo-feeding, notifying
post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum = true
maintenance-plugins=post-registration, path-info-deletion
# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database
post-registration.class=ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval=30
post-registration.cleanup-tasks-folder=${root-dir}/post-registration/cleanup-tasks
post-registration.last-seen-data-set-file=${root-dir}/post-registration/last-seen-data-set.txt
post-registration.post-registration-tasks=pathinfo-feeding, notifying
post-registration.pathinfo-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum=true
# The NotifyingTask throws an exception. Thus, post-registration will always fail.
# This is needed to have an example of a data set which isn't post-registered.
post-registration.notifying.class = ch.systemsx.cisd.etlserver.postregistration.NotifyingTask
post-registration.notifying.destination-path-template = targets/${unknown-placeholder-preventing-post-registration}
post-registration.notifying.message-template = hello
post-registration.notifying.class=ch.systemsx.cisd.etlserver.postregistration.NotifyingTask
post-registration.notifying.destination-path-template=targets/${unknown-placeholder-preventing-post-registration}
post-registration.notifying.message-template=hello
# Maintenance task for deleting entries from pathinfo database after deletion of a data set
path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval = 120
path-info-deletion.data-source = path-info-db
path-info-deletion.data-set-table-name = data_sets
path-info-deletion.data-set-perm-id = CODE
path-info-deletion.class=ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval=120
path-info-deletion.data-source=path-info-db
path-info-deletion.data-set-table-name=data_sets
path-info-deletion.data-set-perm-id=CODE
# ---------------------------------------------------------------------------
# processing plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
processing-plugins = path-info-db-consistency-check
processing-plugins=path-info-db-consistency-check
# Processing task that checks the consistency between the data store and the meta information stored in the PathInfoDB.
# It sends out an email which contains all differences found.
path-info-db-consistency-check.label = Path Info DB consistency check
path-info-db-consistency-check.dataset-types = .*
path-info-db-consistency-check.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin
path-info-db-consistency-check.label=Path Info DB consistency check
path-info-db-consistency-check.dataset-types=.*
path-info-db-consistency-check.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin
# ---------------------------------------------------------------------------
# dropbox configurations
# ---------------------------------------------------------------------------
inputs = default-dropbox
dss-rpc.put-default = default-dropbox
default-dropbox.incoming-dir = ${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition = auto-detection
default-dropbox.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class = ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
default-dropbox.validation-script-path = ../core-plugins/default/default-validation-script.py
inputs=default-dropbox
dss-rpc.put-default=default-dropbox
default-dropbox.incoming-dir=${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition=auto-detection
default-dropbox.top-level-data-set-handler=ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class=ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
default-dropbox.validation-script-path=../core-plugins/default/default-validation-script.py
# ---------------------------------------------------------------------------
# Archiver configuration (optional)
# ---------------------------------------------------------------------------
# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
# Archiver class specification (together with the list of packages this class belongs to).
archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
archiver.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
# ---------------------------------------------------------------------------
# Imaging database for screening (optional, only used if screening technology switched on)
# ---------------------------------------------------------------------------
# Format: <host>[:<port>]. Default: localhost
# imaging-database.url-host-part =
# Default: prod.
imaging-database.kind = test_js_common2
proteomics-database-kind = test_js_common2
screening-sql-root-folder = ../../../../screening/source/
proteomics-sql-root-folder = ../../../../rtd_phosphonetx/source/
imaging-database.kind=test_js_common2
proteomics-database-kind=test_js_common2
screening-sql-root-folder=../../../../screening/source/
# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION,
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------
jython-version=2.7
......@@ -20,10 +20,6 @@
<param name="project" value="deep_sequencing_unit" />
<param name="pattern" value="*"/>
</antcall>
<antcall target="_dist">
<param name="project" value="rtd_phosphonetx" />
<param name="pattern" value="*"/>
</antcall>
<antcall target="_dist">
<param name="project" value="rtd_yeastx" />
<param name="pattern" value="*"/>
......@@ -91,9 +87,6 @@
<antcall target="_clean">
<param name="project" value="deep_sequencing_unit"/>
</antcall>
<antcall target="_clean">
<param name="project" value="rtd_phosphonetx"/>
</antcall>
<antcall target="_clean">
<param name="project" value="rtd_yeastx"/>
</antcall>
......
......@@ -39,7 +39,6 @@ openbis-common \
openbis_standard_technologies \
openbis_api \
plasmid \
rtd_phosphonetx \
rtd_yeastx \
screening \
ui-test\
......
/etc
/tomcat
/targets
/test-output
/.updater
/.dynamic_property_evaluator_queue
/bin
/build/
/.idea/
*.iml
*.eml
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>
<pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
</pydev_project>
File deleted
evaluationDependsOn(':commonbase')
evaluationDependsOn(':common')
evaluationDependsOn(':openbis_api')
evaluationDependsOn(':openbis-common')
evaluationDependsOn(':authentication')
evaluationDependsOn(':dbmigration')
evaluationDependsOn(':openbis')
evaluationDependsOn(':datastore_server')
apply from: '../gradle/javaproject.gradle'
dependencies {
compile project(':common'),
project(':datastore_server')
testCompile project(path: ':datastore_server', configuration: 'tests')
}
sourceSets {
test {
resources {
srcDirs = ['source/java']
}
}
}
jar {
from('../rtd_phosphonetx/source/sql'){
into("/sql")
include "proteomics/**/*.sql"
}
}
task systemTestSuite(type: Test) {
useTestNG()
options.suites('sourceTest/java/tests_system.xml')
jvmArgs '-Xmx2048m', '-XX:MaxPermSize=256m'
reports.html.destination = file("${project.buildDir}/reports/tests-system")
}
test.dependsOn(systemTestSuite)
task zip(type: Zip) {
includeEmptyDirs false
from (jar.archivePath) {
into 'datastore_server/lib/'
rename 'datastore_server_plugin-proteomics(.*)\\.jar', 'datastore_server_plugin-proteomics.jar'
}
from (fileTree(dir: 'source/core-plugins', includes:['proteomics/**', 'proteomics-optional/**'], excludes:['**/as/**', '**/package-to-dist'])) {
into 'core-plugins'
}
}
zip.dependsOn jar
build.dependsOn zip
apply from: 'gwtdev.gradle'
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'>
<appender name="STDOUT" class="org.apache.log4j.ConsoleAppender">
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/>
</layout>
</appender>
<appender name="NULL" class="org.apache.log4j.varia.NullAppender" />
<root>
<priority value ="info" />
<appender-ref ref="STDOUT" />
</root>
</log4j:configuration>
test:a@admins.com:A:The Admin:J3fII6Pex7jnCBwF+uXz2mFuB1QVhPUi
u:u@users.com:U:The User:dmbGKaGRmbX8YKfslMxUHObmYfjywkuT
o:o@observers.com:O:The Observer:t53ADCnFnEFhBvHB7FPoHhbHeW2O1KJc
etlserver::::SHGHSPawL/B3NKXD5nsu4fSrj5LwR2MX
# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1
root-dir = targets/playground
# The root directory of the data store
storeroot-dir = ${root-dir}/data/store
incoming-root-dir = ${root-dir}/data
core-plugins-folder = source/core-plugins
proteomics-database-kind = dev
proteomics-sql-root-folder = source/
session-workspace-root-dir = ${incoming-root-dir}/session-workspace
dss-rpc.put-default = default-dropbox
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =
# Port
port = 8889
# Session timeout in minutes
session-timeout = 720
# Path to the keystore
keystore.path = ../datastore_server/dist/etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit
# The check interval (in seconds)
check-interval = 5
# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to turn this value to something
# lower than 180.
shutdown-timeout = 2
# If free disk space goes below value defined here, a notification email will be sent.
# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
# specified or if value is negative, the system will not be watching.
highwater-mark = -1
# If a data set is successfully registered it sends out an email to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the mails which are sent, when the data set could not be registered.
notify-successful-registration = false
dss-temp-dir = ${root-dir}/dss-temp
dss-registration-log-dir = ${root-dir}/dss-registration-logs
dss-recovery-state-dir = ${root-dir}/recovery-state
use-ssl = false
# The URL of the openBIS server
server-url = http://localhost:8888/openbis
server-timeout-in-minutes = 10
# The username to use when contacting the openBIS server
username = etlserver
# The password to use when contacting the openBIS server
password = etlserver
# The base URL for Web client access.
download-url = http://localhost:8889
# SMTP properties (must start with 'mail' to be considered).
mail.smtp.host = file://targets/email
mail.from = datastore_server@localhost
# ---------------- Timing parameters for file system operations on remote shares.
# ---------------------------------------------------------------------------
# File-system operation safety net
# ---------------------------------------------------------------------------
# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this
# timeout will be terminated.
timeout=60
# Number of times that a timed out operation will be tried again (0 means: every file system
# operation will only ever be performed once).
max-retries=11
# Time (in seconds) to wait after an operation has been timed out before re-trying.
failure-interval=10
# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine if incoming data are ready to be processed.
# NOTE(review): 'quiet-period' is also assigned near the top of this file; Java
# properties are last-wins, so confirm the duplicate key is intentional.
quiet-period=10
# ---------------------------------------------------------------------------
# Globally used separator character which separates entities in a data set file name
data-set-file-name-entity-separator=_
# ---------------------------------------------------------------------------
# dropbox configurations
# ---------------------------------------------------------------------------
inputs=default-dropbox
dss-rpc.put-default=default-dropbox
default-dropbox.incoming-dir=${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition=auto-detection
default-dropbox.top-level-data-set-handler=ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class=ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
# Jython version used for dropbox scripts
jython-version=2.7
# Maximal number of visible columns in tables. Default: 50.
max-visible-columns=15
# Move deleted entities to trash instead of deleting them permanently
enable-trash=true
# Enabled technology modules
technologies=proteomics
# Relative path of cache. Default value is 'cache'.
proteomics.cache-folder=targets/cache
# Minimum free disk space needed for the cache. Default value is 1 GB.
#proteomics.minimum-free-disk-space-in-MB=1024
# Maximum retention time. Data older than this time will be removed from cache. Default value is a week.
#proteomics.maximum-retention-time-in-days=7
File deleted
# Gradle wrapper configuration: which Gradle distribution to download and
# where to cache it (paths are relative to GRADLE_USER_HOME).
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
# NOTE(review): the distribution is served from an internal ETHZ SVN mirror,
# not services.gradle.org - builds outside that network need access to it.
distributionUrl=http\://svnsis.ethz.ch/repos/cisd/ivy-repository/trunk/gradle/distribution/5.6.4/gradle-5.6.4-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
# Follow the chain of symlinks until PRG names the real script file.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
# Absolute symlink target: use it directly.
PRG="$link"
else
# Relative symlink target: resolve against the directory of the link.
PRG=`dirname "$PRG"`"/$link"
fi
done
# Remember the caller's working directory, enter the script's directory to
# capture its physical (symlink-free) path, then return.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print all arguments as a single line on stdout.
warn ( ) {
echo "$*"
}
# Print all arguments framed by blank lines and abort the script with exit code 1.
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
# Classify the host platform from `uname` so later path conversion and
# file-descriptor handling can special-case Cygwin, macOS, MinGW and NonStop.
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar is the only classpath entry; it bootstraps Gradle itself.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
# No JAVA_HOME set: fall back to whatever 'java' the PATH provides.
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
# (Skipped on Cygwin, macOS and NonStop, where ulimit handling differs.)
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
# "maximum"/"max" means: raise the soft limit up to the hard limit.
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
# (a regex matching any top-level root directory of the filesystem).
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each argument that looks like a Unix path (and is not an option) is run
# through cygpath; results are stored in args0..args9 via eval.
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
# Rebuild the positional parameters from the converted argsN variables
# (supports at most 9 arguments under plain /bin/sh).
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# Wrap each argument in single quotes (escaping embedded quotes) so the
# later 'eval set --' reconstructs the original argument list verbatim.
save ( ) {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME: probe for a 'java.exe' on the PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any quotes from JAVA_HOME and check the executable exists.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
// Dedicated dependency configuration holding everything the GWT compiler
// needs on its classpath.
configurations.create('gwt')
// The GWT toolchain is only wired up for development builds.
if (System.getProperty("openbis.development-build") != null) {
configurations.gwt {
// All required artifacts are listed explicitly below; do not resolve
// transitive dependencies.
transitive = false
}
dependencies {
gwt 'reveregroup:gwt-image-loader:1.1.4',
'google:gwt-dev:2.4',
'google:gwt-user:2.4',
'google:gwt-debug-panel:1.0',
'sencha:gxt:2.2.5',
'sis:sis-base:18.09.0',
'fasterxml:jackson-annotations:2.9.10',
project(':common'),
project(path:':common', configuration: 'archives'),
project(path:':openbis_api'),
project(path:':openbis_api', configuration: 'archives'),
project(path:':openbis-common'),
project(path:':openbis-common', configuration: 'archives'),
project(path:':openbis'),
project(path:':openbis', configuration: 'archives'),
// NOTE(review): this commit removes the rtd_phosphonetx project;
// confirm this reference does not survive in active build files.
project(path:':rtd_phosphonetx', configuration: 'archives')
}
}
// Remove the GWT compiler cache and any previous compilation output.
// Uses Gradle's built-in Delete task instead of shelling out to `rm -rf`
// via Exec, so the task also works on hosts without a POSIX `rm`
// (e.g. plain Windows) and is recognized by Gradle as a file operation.
task deleteGWT(type: Delete) {
    delete "${project.projectDir.absolutePath}/.gwt-cache",
           "${project.projectDir.absolutePath}/targets/www"
}
// Assemble the exploded web application directory (targets/www/WEB-INF)
// used by the GWT compiler: server resources from this project, the common
// project and openbis, the runtime libraries, and service.properties.
task prepareGWT(type:Copy, dependsOn: [deleteGWT, compileJava]) {
from 'resource/server'
from files('../common/resource/server') {
// each project supplies its own gwt.xml (a placeholder is created below)
exclude 'gwt.xml'
}
from file('../openbis/resource/server/bis-common.xml')
into 'targets/www/WEB-INF'
from (project.configurations.runtime.files) {
into 'lib'
// openBIS's own jars are excluded - only third-party runtime jars are
// copied into WEB-INF/lib.
exclude 'cisd-cifex*'
exclude 'common-*'
exclude 'authentication-*'
exclude 'dbmigration-*'
exclude 'openbis-common-*'
exclude 'openbis_api-*'
exclude 'openbis-*'
exclude 'datastore_server-*'
}
from ('source/java/service.properties') {
into 'classes'
}
doLast {
// NOTE(review): creates an empty gwt.xml placeholder in WEB-INF -
// presumably expected to exist at runtime; confirm with the consumer.
file("targets/www/WEB-INF/gwt.xml").createNewFile()
}
}
// Run the GWT compiler over the proteomics client module, emitting the
// compiled JavaScript into targets/www/gwt-temp.
task compileGWTToTemp(type: JavaExec, dependsOn: prepareGWT) {
//inputs.source
// NOTE(review): the next line evaluates the source dirs and discards the
// result (leftover of the commented-out inputs declaration above); it does
// NOT register task inputs - confirm whether that is intended.
sourceSets.main.java.srcDirs
inputs.dir sourceSets.main.output.resourcesDir
outputs.dir 'targets/www/gwt-temp'
main = 'com.google.gwt.dev.Compiler'
maxHeapSize = '1024m'
// GWT compilation needs the Java *sources* of all contributing modules in
// addition to compiled classes/resources and the 'gwt' configuration.
// The closure is resolved lazily by JavaExec.classpath(Object...).
classpath {
[
sourceSets.main.java.srcDirs,
project(":openbis").sourceSets.main.java.srcDirs,
project(":openbis").sourceSets.main.output.resourcesDir,
project(":openbis").sourceSets.main.output.classesDirs,
project(":openbis").sourceSets.main.compileClasspath,
project(":common").sourceSets.main.java.srcDirs,
project(":openbis_api").sourceSets.main.java.srcDirs,
project(":rtd_phosphonetx").sourceSets.main.java.srcDirs,
project(":rtd_phosphonetx").sourceSets.main.output.resourcesDir,
configurations.gwt
]
}
args = [ '-war',
'targets/www/gwt-temp',
"ch.systemsx.cisd.openbis.plugin.proteomics.OpenBISSafari",
'-draftCompile',
'-style', 'PRETTY',
'-logLevel', 'INFO']
}
// Publish the GWT compiler output of the proteomics web module into the
// test web application directory.
task compileGWT(type: Copy, dependsOn: compileGWTToTemp) {
    from('targets/www/gwt-temp/ch.systemsx.cisd.openbis.plugin.proteomics.OpenBIS')
    into('targets/www/openbis-test-proteomics')
}
# Dependency-structure rules for the proteomics plugin (Classycle-style
# dependency definition file). Verifies client/server/shared layering and
# keeps the published API packages free of internal dependencies.
#
#show allResults
{root} = ch.systemsx.cisd.openbis
{generic} = ${root}.generic
{proteomics} = ${root}.plugin.proteomics
######################################################################
# Check dependencies to openbis
# The proteomics ETL server code may only use the listed shared packages,
# never openBIS internals.
[etlserver] = ${root}.etlserver.proteomics.*
[private_openbis] = ${root}.* excluding [etlserver] ${root}.dss.generic.shared.* ${root}.generic.shared.* ${proteomics}.shared.* ${root}.common.*
check sets [etlserver]
check [etlserver] independentOf [private_openbis]
######################################################################
# Check plugin dependencies
#
[proteomics.client] = ${proteomics}.client.*
[proteomics.server] = ${proteomics}.server.*
[proteomics.shared] = ${proteomics}.shared.*
[other.client] = ${generic}.client.* ${root}.plugin.*.client excluding [proteomics.client]
[other.server] = ${generic}.server.* ${root}.plugin.*.server excluding [proteomics.server]
check sets [proteomics.client] [proteomics.server] [proteomics.shared]
# shared must sit below client and server; no cross-talk with other plugins.
layer proteomics.client-server = [proteomics.client] [proteomics.server]
layer proteomics.shared = [proteomics.shared]
check layeringOf proteomics.shared proteomics.client-server
check [proteomics.client] independentOf [other.server]
check [proteomics.server] independentOf [other.client]
######################################################################
# Check API
#
# Published API packages may depend only on the JDK (plus a few whitelisted
# common API classes), so external clients can use them stand-alone.
[api-shared] = ${root}.generic.shared.api.* ${proteomics}.shared.api.*
[api-client] = ${proteomics}.client.api.*
[everything-except-java] = * excluding java* *.annotation.* ch.systemsx.cisd.common.api.IRpcService
[everything-except-java-and-api-shared] = [everything-except-java] excluding [api-shared] ${root}.common.api.client.* ch.systemsx.cisd.common.api.retry.*
check sets [api-shared] [api-client]
check [api-shared] independentOf [everything-except-java]
check [api-client] independentOf [everything-except-java-and-api-shared]
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment