Commit 2c49fa48 authored by buczekp

initial commit

SVN: 16118
parent b79b516e
#! /bin/bash
ME="$0"
MYDIR=${ME%/*}
cd "$MYDIR" || exit 1
ant -lib ../../build_resources/lib/ecj.jar "$@"
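# Illustrative usage (the script name 'build.sh' is an assumption): arguments are passed straight
# to ant, so any target from the build.xml below can be named; with no arguments the default
# 'ci' target is run.
#   ./build.sh dist       # build only the distribution
#   ./build.sh run-tests  # run only the test suite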
<project name="plasmid" default="ci" basedir="..">
<import file="../../datastore_server/build/build.xml" />
<project-classpath name="ecp" classes="${classes}" />
<property name="original.dist" value="dist" />
<property name="mainfolder" value="plasmid" />
<property name="variant" value="-plasmid" />
<target name="compile" depends="build-common.compile, clean" />
<target name="run-tests">
<antcall target="build-common.run-tests">
<param name="test.suite" value="tests.xml" />
</antcall>
</target>
<!--
// Task for creating distributions
-->
<target name="dist" depends="datastore_server.make-dist" />
<!--
// Task for continuous integration server.
-->
<target name="ci" depends="build-common.ci, dist, check-dependencies" />
<target name="dss-jar" depends="datastore_server.dss-jar">
<jar update="true" destfile="${dss-jar.file}">
<fileset dir="source">
<include name="**/*.sql" />
</fileset>
</jar>
</target>
</project>
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'>
<appender name="STDOUT" class="org.apache.log4j.ConsoleAppender">
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/>
</layout>
</appender>
<appender name="NULL" class="org.apache.log4j.varia.NullAppender" />
<root>
<priority value="info" />
<appender-ref ref="STDOUT" />
</root>
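<!-- Illustrative note: the NULL appender above can be used to silence a noisy category, e.g.
     (hypothetical package name) by adding before <root>:
     <logger name="some.chatty.package" additivity="false"><appender-ref ref="NULL"/></logger> -->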
</log4j:configuration>
# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1
data-folder = targets/playground/data
# The root directory of the data store
storeroot-dir = ${data-folder}/store
# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =
# Port
port = 8889
# Session timeout in minutes
session-timeout = 720
# Set to 'false' for development/testing without a deployed server. In this mode the datastore
# server will not use SSL when connecting to openBIS. Otherwise all 'keystore' properties need to
# be set for the SSL connection (the default when 'use-ssl' is not set, so there is no need to
# specify it on production servers).
use-ssl = false
# Path to the keystore
keystore.path = ../datastore_server/dist/etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit
# The check interval (in seconds)
check-interval = 5
# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: on a network file system it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 2
# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water mark
# is specified or the value is negative, the system will not watch the free disk space.
highwater-mark = -1
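# Example (illustrative): to be notified when less than 5 GB of free space remain, set
#   highwater-mark = 5242880
# i.e. 5 * 1024 * 1024 kilobytes.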
# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the mails which are sent when a data set could not be registered.
notify-successful-registration = false
# The URL of the openBIS server
server-url = http://localhost:8888
# The username to use when contacting the openBIS server
username = etlserver
# The password to use when contacting the openBIS server
password = etlserver
# The base URL for Web client access.
download-url = https://localhost:${port}
# SMTP properties (must start with 'mail' to be considered).
mail.smtp.host = file://targets/playground/email
mail.from = datastore_server@localhost
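# Note (assumption about the mail client, not verified here): the 'file://' value for
# mail.smtp.host is used in this playground setup so that outgoing mails end up as files in the
# given directory instead of being sent; a real SMTP host name would be used in production.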
# ---------------- Timing parameters for file system operations on remote shares.
# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this
# timeout will be terminated.
timeout = 60
# Number of times that a timed out operation will be tried again (0 means: every file system
# operation will only ever be performed once).
max-retries = 11
# Time (in seconds) to wait after an operation has been timed out before re-trying.
failure-interval = 10
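# Rough worked example with the values above (illustrative, assuming one retry follows each
# timeout after the failure interval): a persistently hanging operation is attempted
# 1 + 11 = 12 times, so it is finally given up after about 12 * 60 + 11 * 10 = 830 seconds.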
# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine whether incoming data are ready to be processed.
quiet-period = 10
# Comma-separated names of processing threads. Each thread should have its configuration properties prefixed with its name,
# e.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'.
inputs = main-thread
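# Example (hypothetical thread name): a second drop box could be added as
#   inputs = main-thread, second-thread
#   second-thread.incoming-dir = ${data-folder}/incoming-second
# with every other 'second-thread.*' property mirroring the 'main-thread.*' properties below.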
# ---------------------------------------------------------------------------
# main thread configuration
# ---------------------------------------------------------------------------
# The directory to watch for incoming data.
main-thread.incoming-dir = ${data-folder}/incoming
# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
# - auto-detection - when no write access has been detected for the specified 'quiet-period'
# - marker-file - when an appropriate marker file for the data exists.
# The default value is 'marker-file'.
main-thread.incoming-data-completeness-condition = auto-detection
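# Illustrative note: with 'auto-detection' the data are considered complete once no write has been
# observed for 'quiet-period' seconds (see above); with 'marker-file' the client must additionally
# create an empty marker file (in openBIS typically named '.MARKER_is_finished_<folder-name>')
# once copying has finished.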
main-thread.delete-unidentified = true
# ---------------- Plugin properties
# TODO
# The extractor class to use for code extraction
#main-thread.data-set-info-extractor = ch.systemsx.cisd.cina.dss.info.CinaDataSetInfoExtractor
main-thread.data-set-info-extractor = eu.basysbio.cisd.dss.DataSetInfoExtractor
# The extractor class to use for type extraction
main-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
main-thread.type-extractor.file-format-type = GB
main-thread.type-extractor.locator-type = RELATIVE_LOCATION
main-thread.type-extractor.data-set-type = PLASMID
main-thread.type-extractor.is-measured = true
#main-thread.type-extractor = ch.systemsx.cisd.cina.dss.info.CinaTypeExtractor
#main-thread.type-extractor = ch.systemsx.cisd.etlserver.cifex.CifexTypeExtractor
# The storage processor (IStorageProcessor implementation)
main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
# Cifex?
#show allResults
{root} = ch.systemsx.cisd.plasmid
{dss} = ${root}.dss
{openbis} = ch.systemsx.cisd.openbis
######################################################################
# Check dependencies to openbis
[dss] = ${dss}.*
[private_openbis] = ${openbis}.* excluding [dss] ${openbis}.dss.generic.shared.* ${openbis}.generic.shared.* ${openbis}.dss.generic.server.plugins.*
check sets [dss]
check [dss] independentOf [private_openbis]
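# In words (illustrative reading of the rules above): classes under ch.systemsx.cisd.plasmid.dss.*
# may use the shared/public openBIS packages listed after 'excluding', but must not depend on any
# other ch.systemsx.cisd.openbis package.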