diff --git a/integration-tests/templates/datastore_server_screening/etc/service.properties b/integration-tests/templates/datastore_server_screening/etc/service.properties
new file mode 100644
index 0000000000000000000000000000000000000000..be3a27e046689a3b53835e40df77f2f464435aea
--- /dev/null
+++ b/integration-tests/templates/datastore_server_screening/etc/service.properties
@@ -0,0 +1,376 @@
+# Unique code of this Data Store Server. Not more than 40 characters.
+data-store-server-code = dss-screening
+
+# host name of the machine on which the datastore server is running
+host-address = https://localhost
+
+# parent directory of the store directory and all the dropboxes
+root-dir = data
+
+# The root directory of the data store
+storeroot-dir = ${root-dir}/store
+
+# The directory where the command queue file is located; defaults to storeroot-dir
+commandqueue-dir =
+
+# Port
+port = 8444
+use-ssl = true
+
+# Session timeout in minutes
+session-timeout = 720
+
+# Path to the keystore
+keystore.path = etc/openBIS.keystore
+# Password of the keystore
+keystore.password = changeit
+# Key password of the keystore
+keystore.key-password = changeit
+
+# The check interval (in seconds)
+check-interval = 60
+
+# The time-out for clean up work in the shutdown sequence (in seconds).
+# Note that the maximal time for the shutdown sequence to complete can be as large
+# as twice this time.
+# Remark: On a network file system, it is not recommended to set this value to anything
+# lower than 180.
+shutdown-timeout = 180
+
+# If free disk space goes below the value defined here, a notification email will be sent.
+# Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
+# specified or if the value is negative, the system will not be watching.
+highwater-mark = -1
+
+# If a data set is successfully registered, an email is sent to the registrator.
+# If this property is not specified, no email is sent to the registrator. This property
+# does not affect the mails which are sent when a data set could not be registered.
+notify-successful-registration = false
+
+# The URL of the openBIS server
+server-url = ${host-address}:8443
+
+# The username to use when contacting the openBIS server
+username = etlserver
+
+# The password for the etlserver user who contacts the openBIS server
+password = etlserver_password
+
+# The base URL for Web client access to the data store server.
+download-url = ${host-address}:${port}
+
+# SMTP properties (must start with 'mail' to be considered).
+# The current configuration saves the emails in the file system in the root directory
+mail.smtp.host = file://${root-dir}
+# mail.smtp.host = localhost
+# mail.from = datastore_server@ethz.ch
+# mail.smtp.user =
+# mail.smtp.password =
+
+# ---------------------------------------------------------------------------
+# (optional) archiver configuration
+# ---------------------------------------------------------------------------
+
+# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
+
+# Archiver class specification (together with the list of packages this class belongs to).
+#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
+
+# ---------------------------------------------------------------------------
+# INTERNAL CONFIGURATION,
+# Do not change this part unless you are developing openBIS extensions.
+# ---------------------------------------------------------------------------
+
+# ---------------------------------------------------------------------------
+# screening database specification
+# ---------------------------------------------------------------------------
+
+data-sources = imaging-db
+imaging-db.version-holder-class = ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseVersionHolder
+imaging-db.databaseEngineCode = postgresql
+imaging-db.basicDatabaseName = imaging
+imaging-db.databaseKind = productive
+imaging-db.scriptFolder = sql
+imaging-db.owner =
+imaging-db.password =
+# Credentials of a database user which is able to create a new database or roles in it.
+# Leave empty to use the db engine's defaults.
+# Used only during the first start of the server or when the server is upgraded to a new version.
+imaging-db.adminUser =
+imaging-db.adminPassword =
+
+# ---------------------------------------------------------------------------
+# reporting and processing plugins configuration
+# ---------------------------------------------------------------------------
+
+# Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
+reporting-plugins = default-plate-image-analysis, plate-image-analysis-graph
+
+default-plate-image-analysis.label = Image Analysis Results
+default-plate-image-analysis.dataset-types = HCS_ANALYSIS_WELL_FEATURES
+default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisMergedRowsReportingPlugin
+default-plate-image-analysis.properties-file =
+
+plate-image-analysis-graph.label = Image Analysis Graphs
+plate-image-analysis-graph.dataset-types = HCS_ANALYSIS_WELL_FEATURES
+plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
+plate-image-analysis-graph.servlet-path = datastore_server_graph/
+plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties
+
+# ---------------------------------------------------------------------------
+# screening specific extension servlets
+# ---------------------------------------------------------------------------
+
+# list of additional web servlets which will be exposed
+plugin-services = screening-image-download-servlet, tabular-data-graph-servlet, screening-dss-api-exporter-servlet
+
+# class of the web servlet
+screening-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
+# URL which will be mapped to this servlet
+screening-image-download-servlet.path = /datastore_server_screening/*
+
+tabular-data-graph-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
+tabular-data-graph-servlet.path = /datastore_server_graph/*
+tabular-data-graph-servlet.properties-file = etc/tabular-data-graph.properties
+
+# exposes a DSS API interface via RPC
+screening-dss-api-exporter-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.DssScreeningApiServlet
+screening-dss-api-exporter-servlet.path = /rmi-datastore-server-screening-api-v1/*
+
+# ---------------------------------------------------------------------------
+# image overview plugins configuration
+# ---------------------------------------------------------------------------
+
+# Comma separated names of image overview plugins.
+# Each plugin should have configuration properties prefixed with its name.
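+# For example (hypothetical plugin name), a second plugin 'hcs-image-overview' would be configured
+# through properties such as 'hcs-image-overview.class' and 'hcs-image-overview.dataset-types'.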
+# Generic properties for each <plugin> include:
+#   <plugin>.class - Fully qualified plugin class name (mandatory).
+#   <plugin>.default - If true all data set types not handled by other plugins should be handled
+#                      by the plugin (default = false).
+#   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin
+#                            (optional and ignored if default is true, otherwise mandatory).
+overview-plugins = microscopy-image-overview
+
+microscopy-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
+microscopy-image-overview.dataset-types = MICROSCOPY_IMAGE
+
+# ---------------------------------------------------------------------------
+
+maintenance-plugins=data-set-clean-up
+# hierarchical-storage-updater
+
+# the plugin which is run periodically to create a mirror structure of the store with the same files
+# but with a user-readable structure of directories
+hierarchical-storage-updater.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
+# specified in seconds. Here: every day
+hierarchical-storage-updater.interval = 86400
+hierarchical-storage-updater.hierarchy-root-dir = ${root-dir}/hierarchical-store
+
+# Removes data sets deleted from openBIS also from the imaging database
+data-set-clean-up.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
+# specified in seconds. Here: every day
+data-set-clean-up.interval = 86400
+data-set-clean-up.data-source = imaging-db
+
+# ---------------------------------------------------------------------------
+# DROPBOXES CONFIGURATION
+# ---------------------------------------------------------------------------
+
+incoming-root-dir = ${root-dir}
+
+# Globally used separator character which separates entities in a data set file name
+data-set-file-name-entity-separator = _
+
+# The period of no write access that needs to pass before an incoming data item is considered
+# complete and ready to be processed (in seconds) [default: 300].
+# Valid only when the auto-detection method is used to determine if incoming data is ready to be processed.
+quiet-period = 300
+
+# code of the default space in openBIS to which the data will be imported
+import-space-code = DEMO
+
+# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
+# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
+inputs=merged-channels-images, split-channels-images, image-analysis-results
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
+merged-channels-images.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+merged-channels-images.data-set-info-extractor.entity-separator = .
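+# With '.' as the separator and sample-code index 0 (set below), an incoming item named
+# "PLATE1.batch42" (hypothetical example) would be registered for the plate sample PLATE1.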
+merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
+merged-channels-images.data-set-info-extractor.index-of-data-producer-code =
+merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+merged-channels-images.type-extractor.file-format-type = JPG
+merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
+merged-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
+merged-channels-images.type-extractor.is-measured = true
+
+# Note: this storage processor is able to process folders compressed with zip as well
+merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+# How should the original data be stored? Possible values:
+#   unchanged       - nothing is changed, the default
+#   hdf5            - all the data will be packaged into one hdf5 file
+#   hdf5_compressed - like hdf5, but each file is stored in a compressed form
+merged-channels-images.storage-processor.original-data-storage-format = unchanged
+# Should the thumbnails be generated?
+# It slows down dataset registration, but increases performance when the user wants to see the image.
+# Can be 'true' or 'false'; 'false' is the default value.
+merged-channels-images.storage-processor.generate-thumbnails = false
+# Thumbnails size in pixels
+# merged-channels-images.storage-processor.thumbnail-max-width = 300
+# merged-channels-images.storage-processor.thumbnail-max-height = 200
+# Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_-].
+# Number and order of entries must be consistent with 'channel-labels'.
+merged-channels-images.storage-processor.channel-codes = GFP, DAPI
+# Labels of the channels in which images have been acquired.
+# Number and order of entries must be consistent with 'channel-codes'.
+merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
+
+# Optional boolean property, true by default.
+# Set to false to allow datasets in one experiment to use different channels.
+# In this case 'channel-codes' and 'channel-labels' become optional and are used only to determine the label for each channel code.
+# It should be set to 'false' for overlay image datasets.
+#merged-channels-images.storage-processor.define-channels-per-experiment = false
+
+# Optional boolean property, true by default.
+# If true, an email is sent if some images for the uploaded plate are missing.
+#merged-channels-images.storage-processor.notify-if-incomplete = false
+
+# Optional boolean property, true by default.
+# If set to false then a dataset which cannot be registered will be left in the incoming folder
+# and will be mentioned in the .faulty_paths file.
+#merged-channels-images.storage-processor.move-unregistered-datasets-to-error-dir = false
+
+# This is an optional boolean property which defines if all image datasets in one experiment have the same
+# channels or if each imported dataset can have different channels. By default true if not specified.
+#merged-channels-images.storage-processor.define-channels-per-experiment = false
+# Format: [width]x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
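+# With the 3x3 value below, each well is expected to be covered by 9 tile images (3 columns x 3 rows).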
+merged-channels-images.storage-processor.well_geometry = 3x3
+# implementation of the IHCSImageFileExtractor interface which maps images to the location on the plate and a particular channel
+# Here: the extractor requires that each image name adheres to the schema:
+#     <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
+merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+# specification of the imaging db
+merged-channels-images.storage-processor.data-source = imaging-db
+# Optional comma separated list of color components.
+# Available values: RED, GREEN or BLUE.
+# If specified then the channels are extracted from the color components and override 'file-extractor' results.
+merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+split-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-split-channels
+split-channels-images.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+split-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+split-channels-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
+split-channels-images.data-set-info-extractor.index-of-sample-code = -1
+split-channels-images.data-set-info-extractor.index-of-data-producer-code = 1
+split-channels-images.data-set-info-extractor.index-of-data-production-date = 0
+split-channels-images.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+split-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+split-channels-images.type-extractor.file-format-type = JPG
+split-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
+split-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
+split-channels-images.type-extractor.is-measured = true
+
+split-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+split-channels-images.storage-processor.generate-thumbnails = false
+# The first specified channel will be blue, the second will be green and the third will be red.
+# If there are more channels, combinations of colors will be used.
+split-channels-images.storage-processor.channel-codes = GFP, DAPI
+split-channels-images.storage-processor.channel-labels = Gfp, Dapi
+split-channels-images.storage-processor.well_geometry = 3x3
+split-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+split-channels-images.storage-processor.data-source = imaging-db
+
+# ---------------------------------------------------------------------------
+
+# The directory to watch for incoming data.
+image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis
+image-analysis-results.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+# Separator used to extract the barcode in the data set file name
+image-analysis-results.data-set-info-extractor.entity-separator = .
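+# For instance, an incoming file named "PLATE1.features.csv" (hypothetical example) would yield
+# "PLATE1" as the token at index 0, attaching the analysis data set to that plate sample.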
+image-analysis-results.data-set-info-extractor.index-of-sample-code = 0
+image-analysis-results.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+image-analysis-results.type-extractor.file-format-type = CSV
+image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION
+image-analysis-results.type-extractor.data-set-type = HCS_ANALYSIS_WELL_FEATURES
+image-analysis-results.type-extractor.is-measured = false
+
+# The storage processor (IStorageProcessor implementation)
+image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
+image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+image-analysis-results.storage-processor.data-source = imaging-db
+# semi-colon (;) by default
+image-analysis-results.storage-processor.separator = ,
+image-analysis-results.storage-processor.ignore-comments = true
+image-analysis-results.storage-processor.well-name-row = row
+image-analysis-results.storage-processor.well-name-col = col
+image-analysis-results.storage-processor.well-name-col-is-alphanum = true
+
+# --- Example configuration of a dropbox for images which are not connected to wells on the plate
+
+# The directory to watch for incoming data.
+#microscopy-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy
+#microscopy-dropbox.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+#microscopy-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+#microscopy-dropbox.data-set-info-extractor.entity-separator = .
+#microscopy-dropbox.data-set-info-extractor.index-of-sample-code = 0
+#microscopy-dropbox.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+#microscopy-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+#microscopy-dropbox.type-extractor.file-format-type = TIFF
+#microscopy-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
+#microscopy-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
+#microscopy-dropbox.type-extractor.is-measured = true
+
+#microscopy-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor
+#microscopy-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyImageFileExtractor
+#microscopy-dropbox.storage-processor.data-source = imaging-db
+#microscopy-dropbox.storage-processor.channel-names = BLUE, GREEN, RED
+#microscopy-dropbox.storage-processor.well_geometry = 2x3
+#microscopy-dropbox.storage-processor.tile_mapping = 1,2,3;4,5,6
+
+# --- Microscopy dropbox with a series of images with any names ---------------------------
+
+# The directory to watch for incoming data.
+#microscopy-series-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy-series
+#microscopy-series-dropbox.incoming-data-completeness-condition = auto-detection
+
+# The extractor class to use for code extraction
+#microscopy-series-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+#microscopy-series-dropbox.data-set-info-extractor.entity-separator = .
+#microscopy-series-dropbox.data-set-info-extractor.index-of-sample-code = 0
+#microscopy-series-dropbox.data-set-info-extractor.space-code = ${import-space-code}
+
+# The extractor class to use for type extraction
+#microscopy-series-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+#microscopy-series-dropbox.type-extractor.file-format-type = TIFF
+#microscopy-series-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
+#microscopy-series-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
+#microscopy-series-dropbox.type-extractor.is-measured = true
+
+#microscopy-series-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyBlackboxSeriesStorageProcessor
+#microscopy-series-dropbox.storage-processor.data-source = imaging-db
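+
+# To activate one of the commented-out microscopy dropboxes above, uncomment its properties and add
+# its name to the 'inputs' list defined in the dropboxes section, e.g. (hypothetical):
+# inputs = merged-channels-images, split-channels-images, image-analysis-results, microscopy-dropbox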