# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

# The root directory of the data store
storeroot-dir = data/store

# The directory where the command queue file is located; defaults to storeroot-dir 
commandqueue-dir =

# Port
port = 8444

# Session timeout in minutes
session-timeout = 720

# Path to the keystore
keystore.path = etc/openBIS.keystore

# Password of the keystore
keystore.password = changeit

# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 60

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value lower
# than 180.
shutdown-timeout = 180

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water
# mark is specified, or if the value is negative, the system will not watch the disk space.
highwater-mark = -1
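# Worked example: to be notified when free disk space drops below 10 GB, set the value
# to 10 * 1024 * 1024 = 10485760 kilobytes:
# highwater-mark = 10485760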

# If set to true, an email is sent to the registrator whenever a data set is successfully
# registered. If this property is not specified, no email is sent to the registrator. This
# property does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = https://localhost:8443/openbis/openbis

# The username to use when contacting the openBIS server
username = etlserver

# The password to use when contacting the openBIS server
password = etlserver

# The base URL for Web client access.
download-url = https://localhost:8444

# SMTP properties (must start with 'mail' to be considered). 
# mail.smtp.host = localhost
# mail.from = datastore_server@localhost
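# A minimal sketch for a non-local mail setup (mail.smtp.port is a standard JavaMail
# property; the host and sender address below are placeholders for your environment):
# mail.smtp.host = smtp.example.org
# mail.smtp.port = 25
# mail.from = datastore_server@example.org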

# ---------------- Timing parameters for file system operations on remote shares.

# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this 
# timeout will be terminated. 
timeout = 60
# Number of times that a timed-out operation will be retried (0 means: every file system
# operation will only ever be performed once).
max-retries = 11
# Time (in seconds) to wait after an operation has timed out before retrying.
failure-interval = 10
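# Worked example for the settings above (assuming each retry first waits 'failure-interval'
# seconds and then runs with the same 'timeout'): a permanently failing operation is given
# up after at most 60 + 11 * (10 + 60) = 830 seconds.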

# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine whether incoming data are
# ready to be processed.
quiet-period = 10

# Globally used separator character which separates entities in a data set file name 
data-set-file-name-entity-separator = _
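# Hypothetical example: with '_' as the separator, an incoming item named
# Sample1_FlowCell42 would be split into the entities 'Sample1' and 'FlowCell42'; what each
# entity means depends on the extractor settings below (e.g. 'index-of-group-code').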

# Specifies what should happen if an error occurs during data set processing.
# By default this flag is set to false and the user has to modify the 'faulty paths file'
# each time a faulty data set should be processed again.
# Set this flag to true if the processing should be repeated after some time without manual
# intervention. Note that this can increase the server load.
# reprocess-faulty-datasets = false

# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------

# Comma-separated names of reporting plugins. Each plugin should have configuration properties
# prefixed with its name. If a name has the 'default-' prefix, it will be used by default in
# the data set Data View.
reporting-plugins = illumina-summary
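# Hypothetical example with two plugins, the first being the default one
# ('demultiplex-stats' is an invented name for illustration):
# reporting-plugins = default-illumina-summary, demultiplex-stats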

# Label of the plugin which will be shown to the users.
illumina-summary.label = Illumina Summary
# Comma-separated list of data set type codes which can be handled by this plugin.
illumina-summary.dataset-types = SHORT_READS_SEQUENCING, ILLUMINA_GA_OUTPUT
# Plugin class specification (fully qualified, i.e. including the package name).
illumina-summary.class = ch.ethz.bsse.cisd.dsu.dss.plugins.IlluminaSummaryReportingPlugin
# The property file. Its content will be passed as a parameter to the plugin.
illumina-summary.properties-file =

# Comma-separated names of processing threads. Each thread should have configuration properties
# prefixed with its name, e.g. the 'code-extractor' property of the thread 'my-etl' should be
# specified as 'my-etl.code-extractor'.
inputs = flow-cell, fl1, fl2

# ---------------------------------------------------------------------------
# 'flow-cell' thread configuration
# ---------------------------------------------------------------------------
# The directory to watch for incoming data.
flow-cell.incoming-dir = data/incoming

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file    - when an appropriate marker file for the data exists
# The default value is 'marker-file'.
flow-cell.incoming-data-completeness-condition = auto-detection
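# With 'marker-file', the DSS conventionally waits for a marker file named
# .MARKER_is_finished_<name-of-incoming-item> in the incoming directory (stated here as an
# assumption; verify the exact naming convention against the openBIS DSS documentation).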

# The group the samples extracted by this thread belong to. If commented out or empty, the
# samples are considered associated with a database instance (not group private).
# flow-cell.group-code = <change this>

# ---------------- Plugin properties
# The extractor class to use for code extraction
flow-cell.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
flow-cell.data-set-info-extractor.strip-file-extension = true
flow-cell.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
flow-cell.data-set-info-extractor.group-code = TEST
# Location of file containing data set properties 
#flow-cell.data-set-info-extractor.data-set-properties-file-name = data-set.properties
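# A sketch of such a file, assuming a tab-separated 'property'/'value' layout (an
# assumption; verify against the openBIS documentation; MY_PROPERTY is hypothetical):
# property	value
# MY_PROPERTY	some value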

# The extractor class to use for type extraction
flow-cell.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
flow-cell.type-extractor.file-format-type = PROPRIETARY
flow-cell.type-extractor.locator-type = RELATIVE_LOCATION
flow-cell.type-extractor.data-set-type = PLG_RESULT
flow-cell.type-extractor.is-measured = true
# The storage processor (IStorageProcessor implementation)
flow-cell.storage-processor = ch.ethz.bsse.cisd.dsu.dss.StorageProcessor
flow-cell.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
# Template for flow-lane drop boxes. {0} is the placeholder for the flow lane number.
flow-cell.storage-processor.flow-lane-drop-box-template = data/drop-box-{0}
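# Example: for flow lane 1 the template resolves to data/drop-box-1, matching
# fl1.incoming-dir below; for lane 2 it resolves to data/drop-box-2 (fl2.incoming-dir).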
flow-cell.storage-processor.transfer.BSSE = data/drop-box-bsse
flow-cell.storage-processor.meta-data-file-prefix = PREFIX_FOR_TSV

# -------- Flow Lane 1 --------------------------------------
fl1.incoming-dir = data/drop-box-1
fl1.data-set-info-extractor = ch.ethz.bsse.cisd.dsu.dss.FlowLaneDataSetInfoExtractor
fl1.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
fl1.data-set-info-extractor.index-of-group-code = 0
fl1.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
fl1.type-extractor.file-format-type = PROPRIETARY
fl1.type-extractor.data-set-type = FLOW_LANE
fl1.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor

# -------- Flow Lane 2 --------------------------------------
fl2.incoming-dir = data/drop-box-2
fl2.data-set-info-extractor = ch.ethz.bsse.cisd.dsu.dss.FlowLaneDataSetInfoExtractor
fl2.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
fl2.data-set-info-extractor.index-of-group-code = 0
fl2.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
fl2.type-extractor.file-format-type = PROPRIETARY
fl2.type-extractor.data-set-type = FLOW_LANE
fl2.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
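
# -------- Flow Lane 3 (sketch) -------------------------------------------
# To handle a third flow lane, add 'fl3' to the 'inputs' list above and duplicate the fl2
# block; a minimal sketch, commented out, with the incoming directory assumed to follow the
# drop-box template:
# fl3.incoming-dir = data/drop-box-3
# fl3.data-set-info-extractor = ch.ethz.bsse.cisd.dsu.dss.FlowLaneDataSetInfoExtractor
# fl3.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
# fl3.data-set-info-extractor.index-of-group-code = 0
# fl3.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
# fl3.type-extractor.file-format-type = PROPRIETARY
# fl3.type-extractor.data-set-type = FLOW_LANE
# fl3.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor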