# Unique code of this Data Store Server. Not more than 40 characters.
    data-store-server-code = DSS1
    
# host name of the machine on which the datastore server is running
host-address = https://$HOSTNAME

# parent directory of the store directory and all the dropboxes
root-dir = $DSS_ROOT_DIR
    
    # The root directory of the data store
    storeroot-dir = ${root-dir}/store
    
    # The directory where the command queue file is located; defaults to storeroot-dir 
    commandqueue-dir =
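# For example (hypothetical path), to keep the command queue file outside the store:
# commandqueue-dir = ${root-dir}/command-queue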
    
    # Port
    port = 8444
    use-ssl = true
    
    # Session timeout in minutes
    session-timeout = 720
    
    # Path to the keystore
    keystore.path = etc/openBIS.keystore
    # Password of the keystore
    keystore.password = changeit
    # Key password of the keystore
    keystore.key-password = changeit
    
# The check interval (in seconds)
check-interval = 5

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 180
    
# If the free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water mark
# is specified or if the value is negative, the system will not watch the free disk space.
highwater-mark = -1
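# For example, to be notified when less than 1 GB of disk space is free (value in kilobytes):
# highwater-mark = 1048576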
    
# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false
    
    # The URL of the openBIS server
    server-url = ${host-address}:8443
    
    # The username to use when contacting the openBIS server
    username = etlserver
    
    # The password for the etlserver user who contacts the openBIS server
    password = etlserver_password
    
    # The base URL for Web client access to the data store server.
    download-url = ${host-address}:${port}
    
    # SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails as files in the root directory instead of sending them.
    mail.smtp.host = file://${root-dir}
    # mail.smtp.host = localhost
    # mail.from = openbis-dss@localhost
    # mail.smtp.user = 
    # mail.smtp.password = 
    
    # ---------------------------------------------------------------------------
    # (optional) archiver configuration
    # ---------------------------------------------------------------------------
    
    # Configuration of an archiver task. All properties are prefixed with 'archiver.'.
    
    # Archiver class specification (together with the list of packages this class belongs to).
    #archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
    
    # ---------------------------------------------------------------------------
    #                      INTERNAL CONFIGURATION, 
    # Do not change this part unless you are developing openBIS extensions.
    # ---------------------------------------------------------------------------
    
    # ---------------------------------------------------------------------------
    # screening database specification
    # ---------------------------------------------------------------------------
    
    data-sources = imaging-db, data-source
    imaging-db.version-holder-class = ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseVersionHolder
    imaging-db.databaseEngineCode = postgresql
    imaging-db.basicDatabaseName = imaging
    imaging-db.databaseKind = productive
    imaging-db.scriptFolder = sql/imaging
    imaging-db.owner =
    imaging-db.password = 
# Credentials of a database user which is able to create a new database or roles in it.
# Leave empty to use the database engine's defaults.
# Used only during the first start of the server or when the server is upgraded to a new version.
    imaging-db.adminUser = 
    imaging-db.adminPassword =
    
    data-source.databaseEngineCode = postgresql
    data-source.basicDatabaseName = proteomics
    data-source.databaseKind = productive
    
    # ---------------------------------------------------------------------------
    # reporting and processing plugins configuration
    # ---------------------------------------------------------------------------
    
    # Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
    reporting-plugins =  default-plate-image-analysis, plate-image-analysis-graph
    
    default-plate-image-analysis.label = Image Analysis Results
    default-plate-image-analysis.dataset-types = HCS_ANALYSIS_WELL_FEATURES
    default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisMergedRowsReportingPlugin
    default-plate-image-analysis.properties-file =
    
    plate-image-analysis-graph.label = Image Analysis Graphs
    plate-image-analysis-graph.dataset-types = HCS_ANALYSIS_WELL_FEATURES
    plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
    plate-image-analysis-graph.servlet-path = datastore_server_graph/
    plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties
    
    # ---------------------------------------------------------------------------
    # screening specific extension servlets 
    # ---------------------------------------------------------------------------
    
    # list of additional web servlets which will be exposed
    plugin-services = screening-image-download-servlet, tabular-data-graph-servlet, screening-dss-api-exporter-servlet
    
    # class of the web servlet
    screening-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
    # URL which will be mapped to this servlet
    screening-image-download-servlet.path = /datastore_server_screening/*
    
    tabular-data-graph-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
    tabular-data-graph-servlet.path = /datastore_server_graph/*
    tabular-data-graph-servlet.properties-file = etc/tabular-data-graph.properties
    
# expose a DSS API interface with RPC
    screening-dss-api-exporter-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.DssScreeningApiServlet
    screening-dss-api-exporter-servlet.path = /rmi-datastore-server-screening-api-v1/*
    
    # ---------------------------------------------------------------------------
    # image overview plugins configuration
    # ---------------------------------------------------------------------------
    
    # Comma separated names of image overview plugins. 
    # Each plugin should have configuration properties prefixed with its name.
    # Generic properties for each <plugin> include: 
    #   <plugin>.class   - Fully qualified plugin class name (mandatory).
    #   <plugin>.default - If true all data set types not handled by other plugins should be handled 
    #                      by the plugin (default = false). 
    #   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin 
    #                      (optional and ignored if default is true, otherwise mandatory). 
    overview-plugins = microscopy-image-overview
    
    microscopy-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
    microscopy-image-overview.dataset-types = MICROSCOPY_IMAGE
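# A hypothetical example of the generic properties described above: a fallback plugin
# ('fallback-overview' is an assumed name; the class is reused from above) that would
# handle all data set types not claimed by any other overview plugin.
# overview-plugins = microscopy-image-overview, fallback-overview
# fallback-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
# fallback-overview.default = true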
    
    # ---------------------------------------------------------------------------
    
maintenance-plugins = data-set-clean-up, data-set-clean-up-proteomics
# Append 'hierarchical-storage-updater' to the list above to enable the plugin configured below.
    
# The plugin which is run periodically to create a mirror structure of the store with the
# same files but with a user-readable directory structure.
hierarchical-storage-updater.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
# Specified in seconds; here: once a day.
hierarchical-storage-updater.interval = 86400
hierarchical-storage-updater.hierarchy-root-dir = ${root-dir}/hierarchical-store
    
# Removes data sets that were deleted from openBIS from the imaging database as well.
data-set-clean-up.class = ch.systemsx.cisd.openbis.dss.etl.DeleteFromImagingDBMaintenanceTask
# Specified in seconds; here: once a day.
    data-set-clean-up.interval = 86400
    data-set-clean-up.data-source = imaging-db
    
    data-set-clean-up-proteomics.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
    data-set-clean-up-proteomics.interval = 300
    data-set-clean-up-proteomics.data-source = data-source
    data-set-clean-up-proteomics.data-set-table-name = data_sets 
    
    
    # ---------------------------------------------------------------------------
    #                      DROPBOXES CONFIGURATION 
    # ---------------------------------------------------------------------------
    
    incoming-root-dir = ${root-dir}
    
    # Globally used separator character which separates entities in a data set file name 
    data-set-file-name-entity-separator = _
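# For illustration (hypothetical file name): with '_' as the separator, an incoming item
# named PROJECT_PLATE1_RAW would be split into the entities PROJECT, PLATE1 and RAW.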
    
# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine whether incoming data are ready
# to be processed.
quiet-period = 10
    
    # code of the default space in openBIS to which the data will be imported
    import-space-code = DEMO
    
# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'.
inputs = simple-dropbox, hcs-dropbox, ms-injection, ms-search

# ---------------------------------------------------------------------------
# ---------- Dropbox for unspecified data -----------------------------------
# ---------------------------------------------------------------------------
    simple-dropbox.incoming-dir = ${incoming-root-dir}/incoming-simple
    simple-dropbox.incoming-data-completeness-condition = auto-detection
    simple-dropbox.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.JythonTopLevelDataSetHandler
    simple-dropbox.script-path = ${incoming-root-dir}/dropboxes/simple-dropbox.py
    simple-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
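# A minimal sketch of what ${incoming-root-dir}/dropboxes/simple-dropbox.py might contain,
# assuming the standard Jython dropbox 'process(transaction)' entry point (the data set
# type 'UNKNOWN' is an assumption; adapt to your installation):
#
#   def process(transaction):
#       data_set = transaction.createNewDataSet()
#       data_set.setDataSetType('UNKNOWN')
#       transaction.moveFile(transaction.getIncoming().getAbsolutePath(), data_set)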
    
# ---------------------------------------------------------------------------
# ---------- Dropbox for HCS images -----------------------------------------
# ---------------------------------------------------------------------------
    hcs-dropbox.incoming-dir = ${incoming-root-dir}/incoming-hcs
    hcs-dropbox.incoming-data-completeness-condition = auto-detection
    hcs-dropbox.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.JythonPlateDataSetHandler
    hcs-dropbox.script-path = ${incoming-root-dir}/dropboxes/hcs-dropbox.py
    hcs-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
    hcs-dropbox.storage-processor.data-source = imaging-db
    hcs-dropbox.storage-processor.define-channels-per-experiment = false
    
    
    # ---------------------------------------------------------------------------
    # 'ms-injection' drop box for spectra data
    # ---------------------------------------------------------------------------
# The directory to watch for incoming data.
ms-injection.incoming-dir = ${incoming-root-dir}/incoming-ms-injection

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file    - when an appropriate marker file for the data exists
# The default value is 'marker-file'.
ms-injection.incoming-data-completeness-condition = auto-detection

ms-injection.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.DataSetInfoExtractorForMSInjection
ms-injection.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
ms-injection.type-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.TypeExtractorForMSInjection

# ---------------------------------------------------------------------------
# 'ms-search' drop box for protein search results
# ---------------------------------------------------------------------------
# The directory to watch for incoming data.
ms-search.incoming-dir = ${incoming-root-dir}/incoming-ms-search

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file    - when an appropriate marker file for the data exists
# The default value is 'marker-file'.
ms-search.incoming-data-completeness-condition = auto-detection

ms-search.data-set-info-extractor = ch.systemsx.cisd.openbis.etlserver.proteomics.DataSetInfoExtractorForProteinResults
ms-search.data-set-info-extractor.separator = +

    ms-search.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
    ms-search.type-extractor.file-format-type = XML
    ms-search.type-extractor.locator-type = RELATIVE_LOCATION
    ms-search.type-extractor.data-set-type = PROT_RESULT
ms-search.type-extractor.is-measured = false

ms-search.storage-processor = ch.systemsx.cisd.openbis.etlserver.proteomics.StorageProcessorWithResultDataSetUploader
    ms-search.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
    ms-search.storage-processor.assuming-extended-prot-xml = false
    ms-search.storage-processor.database.basic-name = ${data-source.basicDatabaseName}
    ms-search.storage-processor.database.kind = ${data-source.databaseKind}
    ms-search.storage-processor.database.owner = 
    ms-search.storage-processor.database.password =