Skip to content
Snippets Groups Projects
Commit ed4d3d68 authored by kaloyane's avatar kaloyane
Browse files

[LMS-2222]: minor installer improvements (added an empty jython dropbox)

SVN: 21178
parent 525d65dd
No related branches found
No related tags found
No related merge requests found
...@@ -55,6 +55,9 @@ ...@@ -55,6 +55,9 @@
</fileset> </fileset>
</copy> </copy>
<!-- this suffix will be used by the installer to check if there are database connections -->
<property name="installer.database.kind" value="screening" />
<izpack input="${installer.basedir}/install.xml" <izpack input="${installer.basedir}/install.xml"
output="${installer.jar.file}" output="${installer.jar.file}"
inheritAll="true" inheritAll="true"
......
...@@ -5,7 +5,7 @@ data-store-server-code = dss-screening ...@@ -5,7 +5,7 @@ data-store-server-code = dss-screening
host-address = https://localhost host-address = https://localhost
# parent directory of the store directory and all the dropboxes # parent directory of the store directory and all the dropboxes
root-dir = data root-dir = $DSS.ROOT-DIR
# The root directory of the data store # The root directory of the data store
storeroot-dir = ${root-dir}/store storeroot-dir = ${root-dir}/store
...@@ -91,13 +91,13 @@ imaging-db.databaseEngineCode = postgresql ...@@ -91,13 +91,13 @@ imaging-db.databaseEngineCode = postgresql
imaging-db.basicDatabaseName = imaging imaging-db.basicDatabaseName = imaging
imaging-db.databaseKind = productive imaging-db.databaseKind = productive
imaging-db.scriptFolder = sql imaging-db.scriptFolder = sql
imaging-db.owner = imaging-db.owner =${DB.USERNAME}
imaging-db.password = imaging-db.password = ${DB.PASSWORD}
# Credentials of a database user which is able to create a new database or roles in it. # Credentials of a database user which is able to create a new database or roles in it.
# Leave empty to use the db engines defaults. # Leave empty to use the db engines defaults.
# Used only during the first start of the server or when server is upgraded to a new version. # Used only during the first start of the server or when server is upgraded to a new version.
imaging-db.adminUser = imaging-db.adminUser = ${DB.ADMIN.USERNAME}
imaging-db.adminPassword = imaging-db.adminPassword = ${DB.ADMIN.PASSWORD}
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# reporting and processing plugins configuration # reporting and processing plugins configuration
...@@ -191,186 +191,27 @@ import-space-code = DEMO ...@@ -191,186 +191,27 @@ import-space-code = DEMO
# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name. # Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor' # E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs=merged-channels-images, split-channels-images, image-analysis-results inputs=simple-dropbox
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# The directory to watch for incoming data.
merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
merged-channels-images.incoming-data-completeness-condition = auto-detection
# The extractor class to use for code extraction
merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
merged-channels-images.data-set-info-extractor.entity-separator = .
merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
merged-channels-images.data-set-info-extractor.index-of-data-producer-code =
merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
# The extractor class to use for type extraction
merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
merged-channels-images.type-extractor.file-format-type = JPG
merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
merged-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
merged-channels-images.type-extractor.is-measured = true
# Note: this storage processor is able to process folders compressed with zip as well
merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
# How should the original data be stored? Possible values:
# unchanged - nothing is changed, the default
# hdf5 - all the data will be packaged into one hdf5 file
# hdf5_compressed - like hdf5, but each file is stored in a compressed form
merged-channels-images.storage-processor.original-data-storage-format = unchanged
# Should the thumbnails be generated?
# It slows down the dataset registration, but increases the performance when the user wants to see the image.
# Can be 'true' or 'false', 'false' is the default value
merged-channels-images.storage-processor.generate-thumbnails = false
# Thumbnails size in pixels
# merged-channels-images.storage-processor.thumbnail-max-width = 300
# merged-channels-images.storage-processor.thumbnail-max-height = 200
# Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_-].
# Number and order of entries must be consistent with 'channel-labels'.
merged-channels-images.storage-processor.channel-codes = GFP, DAPI
# Labels of the channels in which images have been acquired.
# Number and order of entries must be consistent with 'channel-codes'.
merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
# Optional boolean property, true by default.
# Set to false to allow datasets in one experiment to use different channels.
# In this case 'channel-codes' and 'channel-labels' become optional and are used only to determine the label for each channel code.
# It should be set to 'false' for overlay image datasets.
#merged-channels-images.storage-processor.define-channels-per-experiment = false
# Optional boolean property, true by default.
# If true an email is sent if some images for the uploaded plate are missing.
#merged-channels-images.storage-processor.notify-if-incomplete = false
# Optional boolean property, true by default.
# If set to false then the dataset which cannot be registered will be left in the incoming folder
# and will be mentioned in the .faulty_paths file
#merged-channels-images.storage-processor.move-unregistered-datasets-to-error-dir = false
# This is an optional boolean property which defines if all image datasets in one experiment have the same
# channels or if each imported dataset can have different channels. By default true if not specified.
#merged-channels-images.storage-processor.define-channels-per-experiment = false
# Format: [width]>x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
merged-channels-images.storage-processor.well_geometry = 3x3
# implementation of the IHCSImageFileExtractor interface which maps images to the location on the plate and particular channel
# Here: the extractor requires that each image name should adhere to the schema:
# <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
# specification of the imaging db
merged-channels-images.storage-processor.data-source = imaging-db
# Optional comma separated list of color components.
# Available values: RED, GREEN or BLUE.
# If specified then the channels are extracted from the color components and override 'file-extractor' results.
merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# --- Jython dropbox for HCS images -----------------------------------------------
# The directory to watch for incoming data.
split-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-split-channels
split-channels-images.incoming-data-completeness-condition = auto-detection
# The extractor class to use for code extraction
split-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
split-channels-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
split-channels-images.data-set-info-extractor.index-of-sample-code = -1
split-channels-images.data-set-info-extractor.index-of-data-producer-code = 1
split-channels-images.data-set-info-extractor.index-of-data-production-date = 0
split-channels-images.data-set-info-extractor.space-code = ${import-space-code}
# The extractor class to use for type extraction
split-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
split-channels-images.type-extractor.file-format-type = JPG
split-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
split-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
split-channels-images.type-extractor.is-measured = true
split-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
split-channels-images.storage-processor.generate-thumbnails = false
# The first specified channel will be blue, the second will be green and the third will be red.
# If there are more channels, combinations of colors will be used.
split-channels-images.storage-processor.channel-codes = GFP, DAPI
split-channels-images.storage-processor.channel-labels = Gfp, Dapi
split-channels-images.storage-processor.well_geometry = 3x3
split-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
split-channels-images.storage-processor.data-source = imaging-db
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# The directory to watch for incoming data. # The directory to watch for incoming data.
image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis simple-dropbox.incoming-dir = ${incoming-root-dir}/incoming-simple
image-analysis-results.incoming-data-completeness-condition = auto-detection simple-dropbox.incoming-data-completeness-condition = auto-detection
# The extractor class to use for code extraction
image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
# Separator used to extract the barcode in the data set file name
image-analysis-results.data-set-info-extractor.entity-separator = .
image-analysis-results.data-set-info-extractor.index-of-sample-code = 0
image-analysis-results.data-set-info-extractor.space-code = ${import-space-code}
# The extractor class to use for type extraction
image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
image-analysis-results.type-extractor.file-format-type = CSV
image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION
image-analysis-results.type-extractor.data-set-type = HCS_ANALYSIS_WELL_FEATURES
image-analysis-results.type-extractor.is-measured = false
# The storage processor (IStorageProcessor implementation)
image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
image-analysis-results.storage-processor.data-source = imaging-db
# semi-colon (;) by default
image-analysis-results.storage-processor.separator = ,
image-analysis-results.storage-processor.ignore-comments = true
image-analysis-results.storage-processor.well-name-row = row
image-analysis-results.storage-processor.well-name-col = col
image-analysis-results.storage-processor.well-name-col-is-alphanum = true
# --- Example configuration of a dropbox for images which are not connected to wells on the plate
# The directory to watch for incoming data.
#microscopy-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy
#microscopy-dropbox.incoming-data-completeness-condition = auto-detection
# The extractor class to use for code extraction # The extractor class to use for code extraction
#microscopy-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor simple-dropbox.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.JythonPlateDataSetHandler
#microscopy-dropbox.data-set-info-extractor.entity-separator = . simple-dropbox.script-path = ${incoming-root-dir}/dropboxes/simple-dropbox.py
#microscopy-dropbox.data-set-info-extractor.index-of-sample-code = 0
#microscopy-dropbox.data-set-info-extractor.space-code = ${import-space-code} simple-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
simple-dropbox.storage-processor.data-source = imaging-db
# The extractor class to use for type extraction #images-hcs-jython-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.jython.JythonImageFileExtractor
#microscopy-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor simple-dropbox.storage-processor.define-channels-per-experiment = false
#microscopy-dropbox.type-extractor.file-format-type = TIFF # -- Should the thumbnails be generated?
#microscopy-dropbox.type-extractor.locator-type = RELATIVE_LOCATION # It slows down the dataset registration, but increases the performance when the user wants to see the image.
#microscopy-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE # Can be 'true' or 'false', 'false' is the default value
#microscopy-dropbox.type-extractor.is-measured = true # mimages-hcs-jython-dropbox.storage-processor.generate-thumbnails = false
# images-hcs-jython-dropbox.storage-processor.compress-thumbnails = true
#microscopy-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor
#microscopy-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyImageFileExtractor
#microscopy-dropbox.storage-processor.data-source = imaging-db
#microscopy-dropbox.storage-processor.channel-names = BLUE, GREEN, RED
#microscopy-dropbox.storage-processor.well_geometry = 2x3
#microscopy-dropbox.storage-processor.tile_mapping = 1,2,3;4,5,6
# --- Microscopy dropbox with a series of images with any names ---------------------------
# The directory to watch for incoming data.
#microscopy-series-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy-series
#microscopy-series-dropbox.incoming-data-completeness-condition = auto-detection
# The extractor class to use for code extraction
#microscopy-series-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
#microscopy-series-dropbox.data-set-info-extractor.entity-separator = .
#microscopy-series-dropbox.data-set-info-extractor.index-of-sample-code = 0
#microscopy-series-dropbox.data-set-info-extractor.space-code = ${import-space-code}
# The extractor class to use for type extraction
#microscopy-series-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
#microscopy-series-dropbox.type-extractor.file-format-type = TIFF
#microscopy-series-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
#microscopy-series-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
#microscopy-series-dropbox.type-extractor.is-measured = true
#microscopy-series-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyBlackboxSeriesStorageProcessor
#microscopy-series-dropbox.storage-processor.data-source = imaging-db
...@@ -37,6 +37,7 @@ ...@@ -37,6 +37,7 @@
<variable name="ShowCreateDirectoryMessage" value="no" /> <variable name="ShowCreateDirectoryMessage" value="no" />
<variable name="data.validation.error.title" value="Error" /> <variable name="data.validation.error.title" value="Error" />
<variable name="installer.reversetitle" value="$APP_NAME installation" /> <variable name="installer.reversetitle" value="$APP_NAME installation" />
<variable name="DB.NAME" value="openbis_@{installer.database.kind}" />
</variables> </variables>
<dynamicvariables> <dynamicvariables>
...@@ -95,7 +96,7 @@ ...@@ -95,7 +96,7 @@
<packs> <packs>
<pack name="openBIS Server" required="yes" loose="yes"> <pack name="openBIS Server" required="yes" loose="yes">
<description>The openBIS application server</description> <description>The openBIS application server</description>
<file src="openBIS-server-screening-SNAPSHOT-r21061.zip" targetdir="$INSTALL_PATH/servers" unpack="true" /> <file src="openBIS-server-screening-SNAPSHOT-r21158.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
<parsable targetfile="$INSTALL_OPENBIS_PATH/service.properties" /> <parsable targetfile="$INSTALL_OPENBIS_PATH/service.properties" />
<!-- run post installation script for the openBIS server --> <!-- run post installation script for the openBIS server -->
<executable targetfile="$INSTALL_OPENBIS_PATH/install.sh" stage="postinstall"> <executable targetfile="$INSTALL_OPENBIS_PATH/install.sh" stage="postinstall">
...@@ -108,8 +109,17 @@ ...@@ -108,8 +109,17 @@
<pack name="Datastore Server" required="yes" loose="yes"> <pack name="Datastore Server" required="yes" loose="yes">
<description>The data store server managing raw data</description> <description>The data store server managing raw data</description>
<file src="datastore_server-screening-SNAPSHOT-r21061.zip" targetdir="$INSTALL_PATH/servers" unpack="true" /> <file src="datastore_server-screening-SNAPSHOT-r21158.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
<parsable targetfile="$INSTALL_DSS_PATH/etc/service.properties" /> <parsable targetfile="$INSTALL_DSS_PATH/etc/service.properties" />
<!-- create a log folder and store folder -->
<executable targetfile="/bin/mkdir" stage="postinstall" keep="true">
<args>
<arg value="-p" />
<arg value="$INSTALL_DSS_PATH/log" />
<arg value="$DSS.ROOT-DIR/store" />
</args>
</executable>
</pack> </pack>
<pack name="Administration Scripts" required="yes" loose="yes"> <pack name="Administration Scripts" required="yes" loose="yes">
...@@ -140,6 +150,13 @@ ...@@ -140,6 +150,13 @@
</args> </args>
</executable> </executable>
</pack> </pack>
<pack name="HCS Jython dropboxes" required="yes" loose="yes">
<description>A set of minimal Jython dropboxes for importing HCS data into openBIS</description>
<file src="@{installer.resourcedir}/dropboxes" targetdir="$DSS.ROOT-DIR" />
<file src="@{installer.resourcedir}/incoming-sample" targetdir="$DSS.ROOT-DIR" />
</pack>
</packs> </packs>
</installation> </installation>
...@@ -22,11 +22,16 @@ ...@@ -22,11 +22,16 @@
</spec> </spec>
</field> </field>
<field type="text" variable="DB.KIND"> <field type="text" variable="DB.ADMIN.USERNAME">
<spec txt="DB Name Suffix" size="20" set="screening" /> <spec txt="Admin Username" size="20" set="postgres"/>
</field> </field>
<field type="staticText" align="left" txt=" e.g. 'demo' will result in connecting to 'openbis_demo'" />
<field type="password" variable="DB.ADMIN.PASSWORD">
<spec>
<pwd txt="Password" size="20" set=""/>
</spec>
</field>
<field type="staticText" align="left" txt="(NOTE: Password fields could be left empty if IDENT authentication is used)" /> </panel> </panel>
</panel> </panel>
<panel order="1"> <panel order="1">
......
...@@ -15,18 +15,18 @@ database.engine = postgresql ...@@ -15,18 +15,18 @@ database.engine = postgresql
database.create-from-scratch = false database.create-from-scratch = false
# For debugging set this value to true. # For debugging set this value to true.
database.script-single-step-mode = false database.script-single-step-mode = false
database.url-host-part = database.url-host-part = //${DB.HOST}:${DB.PORT}/
database.kind = screening database.kind = screening
database.owner = database.owner = ${DB.USERNAME}
database.owner-password = database.owner-password = ${DB.PASSWORD}
database.admin-user = database.admin-user = ${DB.ADMIN.USERNAME}
database.admin-password = database.admin-password = ${DB.ADMIN.PASSWORD}
data-source-provider = dss-based-data-source-provider data-source-provider = dss-based-data-source-provider
dss-based-data-source-provider.data-store-servers = dss-screening dss-based-data-source-provider.data-store-servers = dss-screening
dss-based-data-source-provider.dss-screening.database-driver = org.postgresql.Driver dss-based-data-source-provider.dss-screening.database-driver = org.postgresql.Driver
dss-based-data-source-provider.dss-screening.database-url = jdbc:postgresql://localhost/imaging_productive dss-based-data-source-provider.dss-screening.database-url = jdbc:postgresql://${DB.HOST}:${DB.PORT}/imaging_productive
#crowd.service.host = crowd-bsse.ethz.ch #crowd.service.host = crowd-bsse.ethz.ch
#crowd.service.port = 8443 #crowd.service.port = 8443
......
...@@ -60,12 +60,20 @@ public class DBConnectionValidator implements DataValidator ...@@ -60,12 +60,20 @@ public class DBConnectionValidator implements DataValidator
String port = data.getVariable("DB.PORT"); String port = data.getVariable("DB.PORT");
String username = data.getVariable("DB.USERNAME"); String username = data.getVariable("DB.USERNAME");
String password = data.getVariable("DB.PASSWORD"); String password = data.getVariable("DB.PASSWORD");
String dbname = "openbis_" + data.getVariable("DB.KIND"); String adminUsername = data.getVariable("DB.ADMIN.USERNAME");
String adminPassword = data.getVariable("DB.ADMIN.PASSWORD");
String dbname = data.getVariable("DB.NAME");
String connectionString = "jdbc:postgresql://" + hostname + ":" + port + "/" + dbname; String connectionString = "jdbc:postgresql://" + hostname + ":" + port + "/" + dbname;
boolean ok = testConnectionOK(connectionString, username, password); if (testConnectionOK(connectionString, username, password))
return ok ? Status.OK : Status.ERROR; {
if (testConnectionOK(connectionString, adminUsername, adminPassword))
{
return Status.OK;
}
}
return Status.ERROR;
} }
private boolean testConnectionOK(String connectionString, private boolean testConnectionOK(String connectionString,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment