diff --git a/openbis_standard_technologies/build/build.xml b/openbis_standard_technologies/build/build.xml index 5da60ba3edb00b080929384d3b6bc87faa7277e3..ffbb6c33829bd354f90b0d762929c21e443a92f0 100644 --- a/openbis_standard_technologies/build/build.xml +++ b/openbis_standard_technologies/build/build.xml @@ -5,7 +5,26 @@ <project-classpath name="ecp.gwt" classes="${targets}/www/WEB-INF/classes" /> <property name="gwt.extended.sources" value="${gwt.sources}:../rtd_phosphonetx/${sources}:../screening/${sources}" /> - <target name="dist" depends="openbis.make-dist"> + <target name="dist" depends="clean"> + <antcall target="openbis.make-dist"> + <param name="variant" value="-standard-technologies" /> + </antcall> + <mkdir dir="${dist.datastore_server.lib}" /> + <antcall target="dist-upload-gui"> + <param name="variant" value="" /> + </antcall> + <antcall target="datastore_server.add-dss-dist-libs"/> + <build-info revision="revision.number" version="version.number" clean="clean.flag" /> + <antcall target="make-dss-jar"/> + <antcall target="rtd_phosphonetx.make-dss-plugin-jar"/> + <move file="${plugin-jar.file}" tofile="${dist.datastore_server.lib}/datastore_server_plugin-proteomics.jar" /> + <antcall target="screening.make-dss-plugin-jar"/> + <move file="${plugin-jar.file}" tofile="${dist.datastore_server.lib}/datastore_server_plugin-screening.jar" /> + <antcall target="screening.add-stuff-to-dss-dist"/> + <antcall target="screening.compute-dss-checksum"/> + <antcall target="datastore_server.create-distribution"> + <param name="variant" value="-standard-technologies" /> + </antcall> </target> <target name="war" depends="rtd_phosphonetx.war, screening.war"> diff --git a/openbis_standard_technologies/dist/etc/log.xml b/openbis_standard_technologies/dist/etc/log.xml new file mode 100644 index 0000000000000000000000000000000000000000..19687423ff3f19206356ebdf01fa941eaad8d96a --- /dev/null +++ b/openbis_standard_technologies/dist/etc/log.xml @@ -0,0 +1,61 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'> + + <appender name="DEFAULT" class="org.apache.log4j.DailyRollingFileAppender"> + + <param name="File" value="log/datastore_server_log.txt"/> + <param name="DatePattern" value="'.'yyyy-MM-dd"/> + + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/> + </layout> + + </appender> + + <appender name="STDOUT" class="org.apache.log4j.ConsoleAppender"> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/> + </layout> + </appender> + + <appender name="NULL" class="org.apache.log4j.varia.NullAppender" /> + + <appender name="EMAIL" class="org.apache.log4j.net.SMTPAppender"> + + <param name="BufferSize" value="512" /> + <param name="SMTPHost" value="localhost" /> + <param name="From" value="datastore-server-openbis@localhost" /> + <param name="To" value="openbis@localhost" /> + <param name="Subject" value="ATTENTION: Data Set Server" /> + <param name="EvaluatorClass" value="ch.systemsx.cisd.common.logging.AlwaysTrueTriggeringEventEvaluator" /> + + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p [%t] %c - %m%n"/> + </layout> + + <!--filter class="org.apache.log4j.varia.LevelRangeFilter"> + <param name="LevelMin" value="ERROR"/> + <param name="LevelMax" value="FATAL"/> + </filter--> + + </appender> + + <category 
name="NOTIFY"> + <priority value="info" /> + <appender-ref ref="DEFAULT" /> + <appender-ref ref="EMAIL" /> + </category> + + <root> + <priority value="info" /> + <appender-ref ref="DEFAULT" /> + </root> + + <logger name="loci.formats.in"> + <!-- Print only messages of level warn or above from the BioFormats library --> + <level value="warn"/> + </logger> + +</log4j:configuration> diff --git a/openbis_standard_technologies/dist/etc/service.properties b/openbis_standard_technologies/dist/etc/service.properties new file mode 100644 index 0000000000000000000000000000000000000000..adf53b9f248926f1bc0f480826aab13a13d89075 --- /dev/null +++ b/openbis_standard_technologies/dist/etc/service.properties @@ -0,0 +1,434 @@ +# Unique code of this Data Store Server. Not more than 40 characters. +data-store-server-code = DSS1 + +# host name of the machine on which the datastore server is running +host-address = https://localhost + +# parent directory of the store directory and all the dropboxes +root-dir = data + +# The root directory of the data store +storeroot-dir = ${root-dir}/store + +# The directory where the command queue file is located; defaults to storeroot-dir +commandqueue-dir = + +# Port +port = 8444 +use-ssl = true + +# Session timeout in minutes +session-timeout = 720 + +# Path to the keystore +keystore.path = etc/openBIS.keystore +# Password of the keystore +keystore.password = changeit +# Key password of the keystore +keystore.key-password = changeit + +# The check interval (in seconds) +check-interval = 60 + +# The time-out for clean up work in the shutdown sequence (in seconds). +# Note that that the maximal time for the shutdown sequence to complete can be as large +# as twice this time. +# Remark: On a network file system, it is not recommended to turn this value to something +# lower than 180. +shutdown-timeout = 180 + +# If free disk space goes below value defined here, a notification email will be sent. +# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is +# specified or if value is negative, the system will not be watching. +highwater-mark = -1 + +# If a data set is successfully registered it sends out an email to the registrator. +# If this property is not specified, no email is sent to the registrator. This property +# does not affect the mails which are sent, when the data set could not be registered. +notify-successful-registration = false + +# The URL of the openBIS server +server-url = ${host-address}:8443 + +# The username to use when contacting the openBIS server +username = etlserver + +# The password for the etlserver user who contacts the openBIS server +password = etlserver_password + +# The base URL for Web client access to the data store server. +download-url = ${host-address}:${port} + +# SMTP properties (must start with 'mail' to be considered). +# The current configuration saves the emails in the file system in the root directory +mail.smtp.host = file://${root-dir} +# mail.smtp.host = localhost +# mail.from = openbis-dss@localhost +# mail.smtp.user = +# mail.smtp.password = + +# --------------------------------------------------------------------------- +# (optional) archiver configuration +# --------------------------------------------------------------------------- + +# Configuration of an archiver task. All properties are prefixed with 'archiver.'. + +# Archiver class specification (together with the list of packages this class belongs to). 
+#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
+
+# ---------------------------------------------------------------------------
+# INTERNAL CONFIGURATION,
+# Do not change this part unless you are developing openBIS extensions.
+# ---------------------------------------------------------------------------
+
+# ---------------------------------------------------------------------------
+# screening database specification
+# ---------------------------------------------------------------------------
+
+data-sources = imaging-db, data-source
+imaging-db.version-holder-class = ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseVersionHolder
+imaging-db.databaseEngineCode = postgresql
+imaging-db.basicDatabaseName = imaging
+imaging-db.databaseKind = productive
+imaging-db.scriptFolder = sql/imaging
+imaging-db.owner = 
+imaging-db.password = 
+# Credentials of a database user which is able to create a new database or roles in it.
+# Leave empty to use the db engine's defaults.
+# Used only during the first start of the server or when the server is upgraded to a new version.
+imaging-db.adminUser = 
+imaging-db.adminPassword = 
+
+data-source.databaseEngineCode = postgresql
+data-source.basicDatabaseName = proteomics
+data-source.databaseKind = productive
+
+# ---------------------------------------------------------------------------
+# reporting and processing plugins configuration
+# ---------------------------------------------------------------------------
+
+# Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
+reporting-plugins = default-plate-image-analysis, plate-image-analysis-graph
+
+default-plate-image-analysis.label = Image Analysis Results
+default-plate-image-analysis.dataset-types = HCS_ANALYSIS_WELL_FEATURES
+default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisMergedRowsReportingPlugin
+default-plate-image-analysis.properties-file = 
+
+plate-image-analysis-graph.label = Image Analysis Graphs
+plate-image-analysis-graph.dataset-types = HCS_ANALYSIS_WELL_FEATURES
+plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
+plate-image-analysis-graph.servlet-path = datastore_server_graph/
+plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties
+
+# ---------------------------------------------------------------------------
+# screening specific extension servlets
+# ---------------------------------------------------------------------------
+
+# list of additional web servlets which will be exposed
+plugin-services = screening-image-download-servlet, tabular-data-graph-servlet, screening-dss-api-exporter-servlet
+
+# class of the web servlet
+screening-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
+# URL which will be mapped to this servlet
+screening-image-download-servlet.path = /datastore_server_screening/*
+
+tabular-data-graph-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
+tabular-data-graph-servlet.path = /datastore_server_graph/*
+tabular-data-graph-servlet.properties-file = etc/tabular-data-graph.properties
+
+# exposes a DSS API interface via RPC
+screening-dss-api-exporter-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.DssScreeningApiServlet
+screening-dss-api-exporter-servlet.path = /rmi-datastore-server-screening-api-v1/*
+
+# 
---------------------------------------------------------------------------
+# image overview plugins configuration
+# ---------------------------------------------------------------------------
+
+# Comma separated names of image overview plugins.
+# Each plugin should have configuration properties prefixed with its name.
+# Generic properties for each <plugin> include:
+#   <plugin>.class - Fully qualified plugin class name (mandatory).
+#   <plugin>.default - If true, all data set types not handled by other plugins should be handled
+#                      by the plugin (default = false).
+#   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin
+#                      (optional and ignored if default is true, otherwise mandatory).
+overview-plugins = microscopy-image-overview
+
+microscopy-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
+microscopy-image-overview.dataset-types = MICROSCOPY_IMAGE
+
+# ---------------------------------------------------------------------------
+
+maintenance-plugins=data-set-clean-up,data-set-clean-up-proteomics
+# hierarchical-storage-updater
+
+# the plugin which is run periodically to create a mirror structure of the store with the same files
+# but with a user-readable structure of directories
+hierarchical-storage-updater.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
+# specified in seconds. Here: every day
+hierarchical-storage-updater.interval = 86400
+hierarchical-storage-updater.hierarchy-root-dir = ${root-dir}/hierarchical-store
+
+# Removes data sets deleted from openBIS also from the imaging database
+data-set-clean-up.class = ch.systemsx.cisd.openbis.dss.etl.DeleteFromImagingDBMaintenanceTask
+# specified in seconds. Here: every day
+data-set-clean-up.interval = 86400
+data-set-clean-up.data-source = imaging-db
+
+data-set-clean-up-proteomics.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
+data-set-clean-up-proteomics.interval = 300
+data-set-clean-up-proteomics.data-source = data-source
+data-set-clean-up-proteomics.data-set-table-name = data_sets
+
+
+# ---------------------------------------------------------------------------
+# DROPBOXES CONFIGURATION
+# ---------------------------------------------------------------------------
+
+incoming-root-dir = ${root-dir}
+
+# Globally used separator character which separates entities in a data set file name
+data-set-file-name-entity-separator = _
+
+# The period of no write access that needs to pass before an incoming data item is considered
+# complete and ready to be processed (in seconds) [default: 300].
+# Valid only when the auto-detection method is used to determine if incoming data is ready to be processed.
+quiet-period = 300
+
+# code of the default space in openBIS to which the data will be imported
+import-space-code = DEMO
+
+# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
+# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
+inputs=merged-channels-images, split-channels-images, image-analysis-results, ms-injection, ms-search
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
+merged-channels-images.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+merged-channels-images.data-set-info-extractor.entity-separator = .
+merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
+merged-channels-images.data-set-info-extractor.index-of-data-producer-code = 
+merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+merged-channels-images.type-extractor.file-format-type = JPG
+merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
+merged-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
+merged-channels-images.type-extractor.is-measured = true
+
+# Note: this storage processor is able to process folders compressed with zip as well
+merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+# How should the original data be stored? Possible values:
+#   unchanged       - nothing is changed, the default
+#   hdf5            - all the data will be packaged into one hdf5 file
+#   hdf5_compressed - like hdf5, but each file is stored in a compressed form
+merged-channels-images.storage-processor.original-data-storage-format = unchanged
+# Should the thumbnails be generated?
+# It slows down dataset registration, but improves performance when the user wants to see the image.
+# Can be 'true' or 'false'; 'false' is the default value
+merged-channels-images.storage-processor.generate-thumbnails = false
+# Thumbnails size in pixels
+# merged-channels-images.storage-processor.thumbnail-max-width = 300
+# merged-channels-images.storage-processor.thumbnail-max-height = 200
+# Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_-].
+# Number and order of entries must be consistent with 'channel-labels'.
+merged-channels-images.storage-processor.channel-codes = GFP, DAPI
+# Labels of the channels in which images have been acquired.
+# Number and order of entries must be consistent with 'channel-codes'.
+merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
+
+# Optional boolean property, true by default.
+# Set to false to allow datasets in one experiment to use different channels.
+# In this case 'channel-codes' and 'channel-labels' become optional and are used only to determine the label for each channel code.
+# It should be set to 'false' for overlay image datasets.
+#merged-channels-images.storage-processor.define-channels-per-experiment = false
+
+# Optional boolean property, true by default.
+# If true, an email is sent if some images for the uploaded plate are missing.
+#merged-channels-images.storage-processor.notify-if-incomplete = false
+
+# Optional boolean property, true by default.
+# If set to false then a dataset which cannot be registered will be left in the incoming folder
+# and will be mentioned in the .faulty_paths file
+#merged-channels-images.storage-processor.move-unregistered-datasets-to-error-dir = false
+
+# This is an optional boolean property which defines if all image datasets in one experiment have the same
+# channels or if each imported dataset can have different channels. By default true if not specified.
+#merged-channels-images.storage-processor.define-channels-per-experiment = false
+# Format: [width]x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
+merged-channels-images.storage-processor.well_geometry = 3x3
+# implementation of the IHCSImageFileExtractor interface which maps images to the location on the plate and a particular channel
+# Here: the extractor requires that each image name adheres to the schema:
+#     <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
+merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+# specification of the imaging db
+merged-channels-images.storage-processor.data-source = imaging-db
+# Optional comma separated list of color components.
+# Available values: RED, GREEN or BLUE.
+# If specified then the channels are extracted from the color components and override 'file-extractor' results.
+merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+split-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-split-channels
+split-channels-images.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+split-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+split-channels-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
+split-channels-images.data-set-info-extractor.index-of-sample-code = -1
+split-channels-images.data-set-info-extractor.index-of-data-producer-code = 1
+split-channels-images.data-set-info-extractor.index-of-data-production-date = 0
+split-channels-images.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+split-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+split-channels-images.type-extractor.file-format-type = JPG
+split-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
+split-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
+split-channels-images.type-extractor.is-measured = true
+
+split-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+split-channels-images.storage-processor.generate-thumbnails = false
+# The first specified channel will be blue, the second will be green and the third will be red.
+# If there are more channels, combinations of colors will be used.
+split-channels-images.storage-processor.channel-codes = GFP, DAPI
+split-channels-images.storage-processor.channel-labels = Gfp, Dapi
+split-channels-images.storage-processor.well_geometry = 3x3
+split-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+split-channels-images.storage-processor.data-source = imaging-db
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis +image-analysis-results.incoming-data-completeness-condition = auto-detection + +# The extractor class to use for code extraction +image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor +# Separator used to extract the barcode in the data set file name +image-analysis-results.data-set-info-extractor.entity-separator = . +image-analysis-results.data-set-info-extractor.index-of-sample-code = 0 +image-analysis-results.data-set-info-extractor.space-code = ${import-space-code} + +# The extractor class to use for type extraction +image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +image-analysis-results.type-extractor.file-format-type = CSV +image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION +image-analysis-results.type-extractor.data-set-type = HCS_ANALYSIS_WELL_FEATURES +image-analysis-results.type-extractor.is-measured = false + +# The storage processor (IStorageProcessor implementation) +image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor +image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor +image-analysis-results.storage-processor.data-source = imaging-db +# semi-colon (;) by default +image-analysis-results.storage-processor.separator = , +image-analysis-results.storage-processor.ignore-comments = true +image-analysis-results.storage-processor.well-name-row = row +image-analysis-results.storage-processor.well-name-col = col +image-analysis-results.storage-processor.well-name-col-is-alphanum = true + +# --- Example configuration of a dropbox for images which are not connected to wells on the plate + +# The directory to watch for incoming data. +#microscopy-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy +#microscopy-dropbox.incoming-data-completeness-condition = auto-detection + +# The extractor class to use for code extraction +#microscopy-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor +#microscopy-dropbox.data-set-info-extractor.entity-separator = . +#microscopy-dropbox.data-set-info-extractor.index-of-sample-code = 0 +#microscopy-dropbox.data-set-info-extractor.space-code = ${import-space-code} + +# The extractor class to use for type extraction +#microscopy-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +#microscopy-dropbox.type-extractor.file-format-type = TIFF +#microscopy-dropbox.type-extractor.locator-type = RELATIVE_LOCATION +#microscopy-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE +#microscopy-dropbox.type-extractor.is-measured = true + +#microscopy-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor +#microscopy-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyImageFileExtractor +#microscopy-dropbox.storage-processor.data-source = imaging-db +#microscopy-dropbox.storage-processor.channel-names = BLUE, GREEN, RED +#microscopy-dropbox.storage-processor.well_geometry = 2x3 +#microscopy-dropbox.storage-processor.tile_mapping = 1,2,3;4,5,6 + +# --- Microscopy dropbox with a series of images with any names --------------------------- + +# The directory to watch for incoming data. 
+#microscopy-series-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy-series
+#microscopy-series-dropbox.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+#microscopy-series-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+#microscopy-series-dropbox.data-set-info-extractor.entity-separator = .
+#microscopy-series-dropbox.data-set-info-extractor.index-of-sample-code = 0
+#microscopy-series-dropbox.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+#microscopy-series-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+#microscopy-series-dropbox.type-extractor.file-format-type = TIFF
+#microscopy-series-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
+#microscopy-series-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
+#microscopy-series-dropbox.type-extractor.is-measured = true
+
+#microscopy-series-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyBlackboxSeriesStorageProcessor
+#microscopy-series-dropbox.storage-processor.data-source = imaging-db
+
+# ---------------------------------------------------------------------------
+# 'ms-injection' drop box for spectra data
+# ---------------------------------------------------------------------------
+# The directory to watch for incoming data.
+ms-injection.incoming-dir = ${data-dir}/incoming-ms-injection
+
+# Determines when the incoming data should be considered complete and ready to be processed.
+# Allowed values:
+#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
+#  - marker-file - when an appropriate marker file for the data exists.
+# The default value is 'marker-file'.
+ms-injection.incoming-data-completeness-condition = auto-detection
+
+ms-injection.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.phosphonetx.DataSetInfoExtractorForMSInjection
+ms-injection.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+ms-injection.type-extractor = ch.systemsx.cisd.openbis.etlserver.phosphonetx.TypeExtractorForMSInjection
+
+# ---------------------------------------------------------------------------
+# 'ms-search' drop box for protein data
+# ---------------------------------------------------------------------------
+# The directory to watch for incoming data.
+ms-search.incoming-dir = ${data-dir}/incoming-ms-search
+
+# Determines when the incoming data should be considered complete and ready to be processed.
+# Allowed values:
+#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
+#  - marker-file - when an appropriate marker file for the data exists.
+# The default value is 'marker-file'.
+ms-search.incoming-data-completeness-condition = auto-detection + +ms-search.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.phosphonetx.DataSetInfoExtractorForProteinResults +ms-search.data-set-info-extractor.separator = + + +ms-search.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor +ms-search.type-extractor.file-format-type = XML +ms-search.type-extractor.locator-type = RELATIVE_LOCATION +ms-search.type-extractor.data-set-type = PROT_RESULT +ms-search.type-extractor.is-measured = false + +ms-search.storage-processor = ch.systemsx.cisd.openbis.etlserver.phosphonetx.StorageProcessorWithResultDataSetUploader +ms-search.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor +ms-search.storage-processor.assuming-extended-prot-xml = false +ms-search.storage-processor.database.basic-name = ${data-source.basicDatabaseName} +ms-search.storage-processor.database.kind = ${data-source.databaseKind} +ms-search.storage-processor.database.owner = +ms-search.storage-processor.database.password = +
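For reference, service.properties above ships its plain microscopy dropbox commented out. A minimal sketch of how it could be activated, reusing the values from the commented example in the file (the thread name microscopy-dropbox and the incoming directory are taken from that example and are illustrative, not part of this change): append the thread to the inputs list and uncomment its properties.

# Hypothetical sketch: register the example microscopy dropbox as an additional processing thread.
inputs=merged-channels-images, split-channels-images, image-analysis-results, ms-injection, ms-search, microscopy-dropbox

# Values as in the commented-out example block in service.properties above.
microscopy-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy
microscopy-dropbox.incoming-data-completeness-condition = auto-detection
microscopy-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
microscopy-dropbox.data-set-info-extractor.entity-separator = .
microscopy-dropbox.data-set-info-extractor.index-of-sample-code = 0
microscopy-dropbox.data-set-info-extractor.space-code = ${import-space-code}
microscopy-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
microscopy-dropbox.type-extractor.file-format-type = TIFF
microscopy-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
microscopy-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
microscopy-dropbox.type-extractor.is-measured = true
microscopy-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor
microscopy-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyImageFileExtractor
microscopy-dropbox.storage-processor.data-source = imaging-db
microscopy-dropbox.storage-processor.channel-names = BLUE, GREEN, RED
microscopy-dropbox.storage-processor.well_geometry = 2x3
microscopy-dropbox.storage-processor.tile_mapping = 1,2,3;4,5,6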