Skip to content
Snippets Groups Projects
Commit 539445f5 authored by felmer's avatar felmer
Browse files

LMS-2818 improving core plugins and service.properties for proteomics

SVN: 24617
parent ecd1297e
No related branches found
No related tags found
No related merge requests found
...@@ -4,6 +4,9 @@ ...@@ -4,6 +4,9 @@
# Variables: # Variables:
# incoming-root-dir # incoming-root-dir
# Path to the directory which contains incoming directories for drop boxes. # Path to the directory which contains incoming directories for drop boxes.
#
# This drop box assumes that data source 'proteomics-db' has been defined.
#
incoming-dir = ${incoming-root-dir}/incoming-ms-search incoming-dir = ${incoming-root-dir}/incoming-ms-search
incoming-data-completeness-condition = auto-detection incoming-data-completeness-condition = auto-detection
...@@ -20,7 +23,7 @@ type-extractor.is-measured = false ...@@ -20,7 +23,7 @@ type-extractor.is-measured = false
storage-processor = ch.systemsx.cisd.openbis.etlserver.proteomics.StorageProcessorWithResultDataSetUploader storage-processor = ch.systemsx.cisd.openbis.etlserver.proteomics.StorageProcessorWithResultDataSetUploader
storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
storage-processor.assuming-extended-prot-xml = false storage-processor.assuming-extended-prot-xml = false
storage-processor.database.basic-name = ${data-source.basicDatabaseName} storage-processor.database.basic-name = ${proteomics-db.basicDatabaseName}
storage-processor.database.kind = ${data-source.databaseKind} storage-processor.database.kind = ${proteomics-db.databaseKind}
storage-processor.database.owner = storage-processor.database.owner =
storage-processor.database.password = storage-processor.database.password =
\ No newline at end of file
# Unique code of this Data Store Server. Not more than 40 characters. # Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1 data-store-server-code = DSS1
root-dir = targets/playground
# The root directory of the data store # The root directory of the data store
storeroot-dir = targets/playground/data/store storeroot-dir = ${root-dir}/data/store
incoming-root-dir = ${root-dir}/data
proteomics-database-kind = dev
# The directory where the command queue file is located; defaults to storeroot-dir # The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir = commandqueue-dir =
...@@ -42,10 +48,14 @@ highwater-mark = -1 ...@@ -42,10 +48,14 @@ highwater-mark = -1
# does not affect the mails which are sent, when the data set could not be registered. # does not affect the mails which are sent, when the data set could not be registered.
notify-successful-registration = false notify-successful-registration = false
dss-temp-dir = ${root-dir}/dss-temp
dss-registration-log-dir = ${root-dir}/dss-registration-logs
use-ssl = false use-ssl = false
# The URL of the openBIS server # The URL of the openBIS server
server-url = http://localhost:8888/openbis server-url = http://localhost:8888/openbis
server-timeout-in-minutes = 10
# The username to use when contacting the openBIS server # The username to use when contacting the openBIS server
username = etlserver username = etlserver
...@@ -76,157 +86,11 @@ failure-interval = 10 ...@@ -76,157 +86,11 @@ failure-interval = 10
# Valid only when auto-detection method is used to determine if an incoming data are ready to be processed. # Valid only when auto-detection method is used to determine if an incoming data are ready to be processed.
quiet-period = 10 quiet-period = 10
# Define names of data sources
data-sources = data-source
data-source.databaseEngineCode = postgresql
data-source.basicDatabaseName = proteomics
data-source.databaseKind = dev
# --------------------------------------------------------------------------- core-plugins-folder = dist/core-plugins
# maintenance plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of maintenance plugins.
# Each plugin should have configuration properties prefixed with its name.
# Mandatory properties for each <plugin> include:
# <plugin>.class - Fully qualified plugin class name
# <plugin>.interval - The time between plugin executions (in seconds)
maintenance-plugins = data-set-clean-up
data-set-clean-up.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
data-set-clean-up.interval = 300
data-set-clean-up.data-source = data-source
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Globally used separator character which separates entities in a data set file name # Globally used separator character which separates entities in a data set file name
data-set-file-name-entity-separator = _ data-set-file-name-entity-separator = _
# Specifies what should happen if an error occurs during dataset processing.
# By default this flag is set to false and the user has to modify the 'faulty paths file'
# each time the faulty dataset should be processed again.
# Set this flag to true if the processing should be repeated after some time without manual intervention.
# Note that this can increase the server load.
# reprocess-faulty-datasets = false
reporting-plugins = demo-reporter, protein-table-report
# Label of the plugin which will be shown for the users.
demo-reporter.label = Show Dataset Size
# Comma separated list of dataset type codes which can be handled by this plugin.
demo-reporter.dataset-types = PROT_RESULT
# Plugin class specification (together with the list of packages this class belongs to).
demo-reporter.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
# The property file. Its content will be passed as a parameter to the plugin.
demo-reporter.properties-file =
protein-table-report.label = APMS Report
protein-table-report.class = ch.systemsx.cisd.openbis.dss.proteomics.server.plugins.APMSReport
protein-table-report.dataset-types = PROT_RESULT
# Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
processing-plugins = copy-data-sets, ms-inj-copy, protein-table-report-creation
# The configuration of the processing plugin is the same as the reporting plugins configuration.
copy-data-sets.label = Data Set Copier to User Location
copy-data-sets.dataset-types = UNKNOWN, PROT_RESULT
copy-data-sets.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetCopierForUsers
copy-data-sets.send-detailed-email = true
#copy-data-sets.destination = /Volumes/share-1-\$/user/cisd/felmer/proteomics/
copy-data-sets.destination = vesuvio:tmp/${user}
ms-inj-copy.label = MS INJECTION Data Set Copier
ms-inj-copy.dataset-types = UNKNOWN, RAW_DATA,MZXML_DATA,PROT_RESULT
ms-inj-copy.class = ch.systemsx.cisd.openbis.dss.proteomics.server.plugins.DataSetCopier
ms-inj-copy.marker-file-prefix = .MARKER_finished_
ms-inj-copy.send-detailed-email = true
#ms-inj-copy.destination = /Volumes/share-1-\$/user/cisd/felmer/proteomics/
ms-inj-copy.destination = vesuvio:tmp
protein-table-report-creation.label = Send me the APMS Report
protein-table-report-creation.class = ch.systemsx.cisd.openbis.dss.proteomics.server.plugins.APMSReport
protein-table-report-creation.dataset-types = PROT_RESULT
data-set-validators = val1
val1.data-set-type = HCS_IMAGE_ANALYSIS_DATA
val1.path-patterns = **/*.tsv, **/*.txt
val1.columns = id, col1, col2
val1.id.header-pattern = ID|id
val1.id.mandatory = true
val1.id.order = 1
# value-type is either unique, any, string, numeric
val1.id.value-type = unique
val1.id.value-pattern = ^(CHEBI:[0-9]{4}_)*(CHEBI:[0-9]{4})$
val1.col1.header-pattern = Description
val1.col1.mandatory = true
val1.col2.header-pattern = .*
val1.col2.value-type = numeric
val1.col2.value-range = [0,Infinity)
# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs=ms-injection,prot-result,prot-result2,cifex
# ---------------------------------------------------------------------------
# MS Injection thread configuration
# ---------------------------------------------------------------------------
ms-injection.incoming-dir = targets/playground/data/incoming-ms
ms-injection.incoming-data-completeness-condition = auto-detection
ms-injection.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.DataSetInfoExtractorForMSInjection
ms-injection.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
ms-injection.type-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.TypeExtractorForMSInjection
# ---------------------------------------------------------------------------
# 'prot-result' thread configuration
# ---------------------------------------------------------------------------
# The directory to watch for incoming data.
prot-result.incoming-dir = targets/playground/data/incoming
# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
# - auto-detection - when no write access has been detected for the specified 'quiet-period'
# - marker-file - when an appropriate marker file for the data exists.
# The default value is 'marker-file'.
prot-result.incoming-data-completeness-condition = auto-detection
# The group to which the samples extracted by this thread belong. If commented out or empty,
# then samples are considered associated with a database instance (not group private).
# prot-result.group-code = <change this>
# ---------------- Plugin properties
# The extractor class to use for code extraction
prot-result.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.DataSetInfoExtractorForProteinResults
prot-result.data-set-info-extractor.separator = +
# The extractor class to use for type extraction
prot-result.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
prot-result.type-extractor.file-format-type = XML
prot-result.type-extractor.locator-type = RELATIVE_LOCATION
prot-result.type-extractor.data-set-type = PROT_RESULT
prot-result.type-extractor.is-measured = true
# The storage processor (IStorageProcessor implementation)
prot-result.storage-processor = ch.systemsx.cisd.openbis.etlserver.proteomics.StorageProcessorWithResultDataSetUploader
prot-result.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
prot-result.storage-processor.database.basic-name = ${data-source.basicDatabaseName}
prot-result.storage-processor.database.kind = ${data-source.databaseKind}
prot-result.storage-processor.database.owner =
prot-result.storage-processor.database.password =
prot-result2. = prot-result.
prot-result2.incoming-dir = targets/playground/data/incoming2
prot-result2.data-set-info-extractor.experiment-type-code = MS_QUANTIFICATION
prot-result2.data-set-info-extractor.experiment-properties-file-name = quantification.properties
# ---------------------------------------------------------------------------
# 'cifex' thread configuration
# ---------------------------------------------------------------------------
cifex.incoming-dir = targets/playground/data/incoming-cifex
cifex.incoming-data-completeness-condition = auto-detection
cifex.delete-unidentified = true
cifex.data-set-info-extractor = ch.systemsx.cisd.etlserver.cifex.CifexDataSetInfoExtractor
cifex.type-extractor = ch.systemsx.cisd.etlserver.cifex.CifexTypeExtractor
cifex.storage-processor = ch.systemsx.cisd.etlserver.CifexStorageProcessor
cifex.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
cifex.storage-processor.keep-file-regex = .*(?<!request\.properties)$
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment