diff --git a/screening/build/build.xml b/screening/build/build.xml
index 51d39cd7777532e62ae30ba34eb17a303b4e0b1e..d641cc07ee542aa3a52caabc55adfa72a5d52b8e 100644
--- a/screening/build/build.xml
+++ b/screening/build/build.xml
@@ -55,6 +55,9 @@
 		    </fileset>
 		</copy>
 		
+		<!-- database kind suffix: the installer derives the database name from it (openbis_<kind>) and uses it when validating database connections -->
+		<property name="installer.database.kind" value="screening" />
+		
 		<izpack input="${installer.basedir}/install.xml"
 		        output="${installer.jar.file}"
 			      inheritAll="true"
diff --git a/screening/dist/etc/service.properties b/screening/dist/etc/service.properties
index be3a27e046689a3b53835e40df77f2f464435aea..21747cab699baa45dd6dbf022552beee27e0d15e 100644
--- a/screening/dist/etc/service.properties
+++ b/screening/dist/etc/service.properties
@@ -5,7 +5,7 @@ data-store-server-code = dss-screening
 host-address = https://localhost
 
 # parent directory of the store directory and all the dropboxes
-root-dir = data
+root-dir = $DSS.ROOT-DIR
 
 # The root directory of the data store
 storeroot-dir = ${root-dir}/store
@@ -91,13 +91,13 @@ imaging-db.databaseEngineCode = postgresql
 imaging-db.basicDatabaseName = imaging
 imaging-db.databaseKind = productive
 imaging-db.scriptFolder = sql
-imaging-db.owner =
-imaging-db.password = 
+imaging-db.owner = ${DB.USERNAME}
+imaging-db.password = ${DB.PASSWORD}
 # Credentials of a database user which is able to create a new database or roles in it.
 # Leave empty to use the db engines defaults.
 # Used only during the first start of the server or when server is upgraded to a new version.
-imaging-db.adminUser = 
-imaging-db.adminPassword =
+imaging-db.adminUser = ${DB.ADMIN.USERNAME}
+imaging-db.adminPassword = ${DB.ADMIN.PASSWORD}
 
 # ---------------------------------------------------------------------------
 # reporting and processing plugins configuration
@@ -191,186 +191,27 @@ import-space-code = DEMO
 
 # Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
 # E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
-inputs=merged-channels-images, split-channels-images, image-analysis-results
+inputs=simple-dropbox
 
 # ---------------------------------------------------------------------------
-
-# The directory to watch for incoming data.
-merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
-merged-channels-images.incoming-data-completeness-condition = auto-detection
-
-# The extractor class to use for code extraction
-merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-merged-channels-images.data-set-info-extractor.entity-separator = .
-merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
-merged-channels-images.data-set-info-extractor.index-of-data-producer-code = 
-merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
-
-# The extractor class to use for type extraction
-merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-merged-channels-images.type-extractor.file-format-type = JPG
-merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
-merged-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
-merged-channels-images.type-extractor.is-measured = true
-
-# Note: this storage processor is able to process folders compressed with zip as well
-merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
-# How should the original data be stored? Possible values:
-#   unchanged       - nothing is changed, the default
-#   hdf5            - all the data will be packaged into one hdf5 file
-#   hdf5_compressed - like hdf5, but each file is stored in a compressed form
-merged-channels-images.storage-processor.original-data-storage-format = unchanged
-# Should the thumbnails be generated? 
-# It slows down the dataset registration, but increases the performance when the user wants to see the image. 
-# Can be 'true' or 'false', 'false' is the default value
-merged-channels-images.storage-processor.generate-thumbnails = false
-# Thumbnails size in pixels
-# merged-channels-images.storage-processor.thumbnail-max-width = 300
-# merged-channels-images.storage-processor.thumbnail-max-height = 200
-# Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_-]. 
-# Number and order of entries must be consistent with 'channel-labels'.
-merged-channels-images.storage-processor.channel-codes = GFP, DAPI
-# Labels of the channels in which images have been acquired. 
-# Number and order of entries must be consistent with 'channel-codes'.
-merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
-
-# Optional boolean property, true by default. 
-# Set to false to allow datasets in one experiment to use different channels.
-# In this case 'channel-codes' and 'channel-labels' become optional and are used only to determine the label for each channel code.
-# It should be set to 'false' for overlay image datasets. 
-#merged-channels-images.storage-processor.define-channels-per-experiment = false
-
-# Optional boolean property, true by default. 
-# If true an email is sent if some images for the uploaded plate are missing. 
-#merged-channels-images.storage-processor.notify-if-incomplete = false
-
-# Optional boolean property, true by default. 
-# If set to false then the dataset whcih cannot be registered will be left in the incoming folder 
-# and will be mentioned in the .faulty_paths file
-#merged-channels-images.storage-processor.move-unregistered-datasets-to-error-dir = false
-    
-# This is an optional boolean property which defines if all image datasets in one experiment have the same
-# channels or if each imported dataset can have different channels. By default true if not specified.
-#merged-channels-images.storage-processor.define-channels-per-experiment = false
-# Format: [width]>x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
-merged-channels-images.storage-processor.well_geometry = 3x3
-# implementation of the IHCSImageFileExtractor interface which maps images to the location on the plate and particular channel
-# Here: the extractor requireds that each image name should adhere to the schema:
-#     <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
-merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
-# specification of the imaging db
-merged-channels-images.storage-processor.data-source = imaging-db
-# Optional comma separated list of color components. 
-# Available values: RED, GREEN or BLUE. 
-# If specified then the channels are extracted from the color components and override 'file-extractor' results.
-merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
-
 # ---------------------------------------------------------------------------
-
-# The directory to watch for incoming data.
-split-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-split-channels
-split-channels-images.incoming-data-completeness-condition = auto-detection
-
-# The extractor class to use for code extraction
-split-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-split-channels-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-split-channels-images.data-set-info-extractor.index-of-sample-code = -1
-split-channels-images.data-set-info-extractor.index-of-data-producer-code = 1
-split-channels-images.data-set-info-extractor.index-of-data-production-date = 0
-split-channels-images.data-set-info-extractor.space-code = ${import-space-code}
-
-# The extractor class to use for type extraction
-split-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-split-channels-images.type-extractor.file-format-type = JPG
-split-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
-split-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
-split-channels-images.type-extractor.is-measured = true
-
-split-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
-split-channels-images.storage-processor.generate-thumbnails = false
-# The first specified channel will be blue, the second will be green and the third will be red.
-# If there are more channels, combinations of colors will be used.
-split-channels-images.storage-processor.channel-codes = GFP, DAPI
-split-channels-images.storage-processor.channel-labels = Gfp, Dapi
-split-channels-images.storage-processor.well_geometry = 3x3
-split-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
-split-channels-images.storage-processor.data-source = imaging-db
-
+# --- Jython dropbox for HCS images -----------------------------------------------
 # ---------------------------------------------------------------------------
 
 # The directory to watch for incoming data.
-image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis
-image-analysis-results.incoming-data-completeness-condition = auto-detection
-
-# The extractor class to use for code extraction
-image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-# Separator used to extract the barcode in the data set file name
-image-analysis-results.data-set-info-extractor.entity-separator = .
-image-analysis-results.data-set-info-extractor.index-of-sample-code = 0
-image-analysis-results.data-set-info-extractor.space-code = ${import-space-code}
-
-# The extractor class to use for type extraction
-image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-image-analysis-results.type-extractor.file-format-type = CSV
-image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION
-image-analysis-results.type-extractor.data-set-type = HCS_ANALYSIS_WELL_FEATURES
-image-analysis-results.type-extractor.is-measured = false
-
-# The storage processor (IStorageProcessor implementation)
-image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
-image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
-image-analysis-results.storage-processor.data-source = imaging-db
-# semi-colon (;) by default 
-image-analysis-results.storage-processor.separator = ,
-image-analysis-results.storage-processor.ignore-comments = true
-image-analysis-results.storage-processor.well-name-row = row
-image-analysis-results.storage-processor.well-name-col = col
-image-analysis-results.storage-processor.well-name-col-is-alphanum = true
-
-# --- Example configuration of a dropbox for images which are not connected to wells on the plate
-
-# The directory to watch for incoming data.
-#microscopy-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy
-#microscopy-dropbox.incoming-data-completeness-condition = auto-detection
+simple-dropbox.incoming-dir = ${incoming-root-dir}/incoming-simple
+simple-dropbox.incoming-data-completeness-condition = auto-detection
 
 # The extractor class to use for code extraction
-#microscopy-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-#microscopy-dropbox.data-set-info-extractor.entity-separator = .
-#microscopy-dropbox.data-set-info-extractor.index-of-sample-code = 0
-#microscopy-dropbox.data-set-info-extractor.space-code = ${import-space-code}
-
-# The extractor class to use for type extraction
-#microscopy-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-#microscopy-dropbox.type-extractor.file-format-type = TIFF
-#microscopy-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
-#microscopy-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
-#microscopy-dropbox.type-extractor.is-measured = true
-
-#microscopy-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor
-#microscopy-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyImageFileExtractor
-#microscopy-dropbox.storage-processor.data-source = imaging-db
-#microscopy-dropbox.storage-processor.channel-names = BLUE, GREEN, RED
-#microscopy-dropbox.storage-processor.well_geometry = 2x3
-#microscopy-dropbox.storage-processor.tile_mapping = 1,2,3;4,5,6
-
-# --- Microscopy dropbox with a series of images with any names ---------------------------
-
-# The directory to watch for incoming data.
-#microscopy-series-dropbox.incoming-dir = ${incoming-root-dir}/incoming-microscopy-series
-#microscopy-series-dropbox.incoming-data-completeness-condition = auto-detection
-
-# The extractor class to use for code extraction
-#microscopy-series-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-#microscopy-series-dropbox.data-set-info-extractor.entity-separator = .
-#microscopy-series-dropbox.data-set-info-extractor.index-of-sample-code = 0
-#microscopy-series-dropbox.data-set-info-extractor.space-code = ${import-space-code}
-
-# The extractor class to use for type extraction
-#microscopy-series-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-#microscopy-series-dropbox.type-extractor.file-format-type = TIFF
-#microscopy-series-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
-#microscopy-series-dropbox.type-extractor.data-set-type = MICROSCOPY_IMAGE
-#microscopy-series-dropbox.type-extractor.is-measured = true
-
-#microscopy-series-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.MicroscopyBlackboxSeriesStorageProcessor
-#microscopy-series-dropbox.storage-processor.data-source = imaging-db
+simple-dropbox.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.JythonPlateDataSetHandler
+simple-dropbox.script-path = ${incoming-root-dir}/dropboxes/simple-dropbox.py
+
+simple-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+simple-dropbox.storage-processor.data-source = imaging-db
+#simple-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.jython.JythonImageFileExtractor
+simple-dropbox.storage-processor.define-channels-per-experiment = false
+# -- Should the thumbnails be generated? 
+# It slows down the dataset registration, but increases the performance when the user wants to see the image. 
+# Can be 'true' or 'false', 'false' is the default value
+# simple-dropbox.storage-processor.generate-thumbnails = false
+# simple-dropbox.storage-processor.compress-thumbnails = true
diff --git a/screening/dist/installer/dropboxes/simple-dropbox.py b/screening/dist/installer/dropboxes/simple-dropbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/screening/dist/installer/incoming-simple/.gitignore b/screening/dist/installer/incoming-simple/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/screening/dist/installer/install.xml b/screening/dist/installer/install.xml
index dc27ed89bc962a519454df8914b12d81865ddb82..fe2ef1972220428d84c64502d3b7f918feab5649 100644
--- a/screening/dist/installer/install.xml
+++ b/screening/dist/installer/install.xml
@@ -37,6 +37,7 @@
     <variable name="ShowCreateDirectoryMessage"  value="no" />
     <variable name="data.validation.error.title"  value="Error" />
     <variable name="installer.reversetitle"  value="$APP_NAME installation" />
+    <variable name="DB.NAME"  value="openbis_@{installer.database.kind}" />
   </variables>
 
   <dynamicvariables>
@@ -95,7 +96,7 @@
   <packs>
     <pack name="openBIS Server" required="yes" loose="yes">
       <description>The openBIS application server</description>
-      <file src="openBIS-server-screening-SNAPSHOT-r21061.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
+      <file src="openBIS-server-screening-SNAPSHOT-r21158.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
       <parsable targetfile="$INSTALL_OPENBIS_PATH/service.properties" />
       <!-- run post installation script for the openBIS server -->
       <executable targetfile="$INSTALL_OPENBIS_PATH/install.sh" stage="postinstall">
@@ -108,8 +109,17 @@
 
     <pack name="Datastore Server" required="yes" loose="yes">
       <description>The data store server managing raw data</description>
-      <file src="datastore_server-screening-SNAPSHOT-r21061.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
+      <file src="datastore_server-screening-SNAPSHOT-r21158.zip" targetdir="$INSTALL_PATH/servers" unpack="true" />
       <parsable targetfile="$INSTALL_DSS_PATH/etc/service.properties" />
+      
+      <!-- create a log folder and store folder -->
+      <executable targetfile="/bin/mkdir" stage="postinstall" keep="true">
+        <args>
+          <arg value="-p" />
+          <arg value="$INSTALL_DSS_PATH/log" />
+          <arg value="$DSS.ROOT-DIR/store" />
+        </args>
+      </executable>
     </pack>
 
     <pack name="Administration Scripts" required="yes" loose="yes">
@@ -140,6 +150,13 @@
         </args>
       </executable>
     </pack>
+    
+    <pack name="HCS Jython dropboxes" required="yes" loose="yes">
+      <description>A set of minimal Jython dropboxes for importing HCS data into openBIS</description>
+      <file src="@{installer.resourcedir}/dropboxes" targetdir="$DSS.ROOT-DIR" />
+      <file src="@{installer.resourcedir}/incoming-simple" targetdir="$DSS.ROOT-DIR" />
+    </pack>
+    
   </packs>
 
 </installation>
diff --git a/screening/dist/installer/sample-datasets/.gitignore b/screening/dist/installer/sample-datasets/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/screening/dist/installer/userInputSpec.xml b/screening/dist/installer/userInputSpec.xml
index 333ae0628c7229c45ad7db19ff6b8b87ddfea953..6d0f0e3d20ef374f3340c04582a015a12b90b33a 100644
--- a/screening/dist/installer/userInputSpec.xml
+++ b/screening/dist/installer/userInputSpec.xml
@@ -22,11 +22,16 @@
       </spec>
     </field>
     
-    <field type="text" variable="DB.KIND">
-      <spec txt="DB Name Suffix" size="20" set="screening" />
+    <field type="text" variable="DB.ADMIN.USERNAME">
+      <spec txt="Admin Username" size="20" set="postgres"/>
     </field>
-    <field type="staticText" align="left" txt=" e.g. 'demo' will result in connecting to 'openbis_demo'" />
     
+    <field type="password" variable="DB.ADMIN.PASSWORD">
+      <spec>
+          <pwd txt="Password" size="20" set=""/>
+      </spec>
+    </field>
+    <field type="staticText" align="left" txt="(NOTE: Password fields can be left empty if IDENT authentication is used)" />
   </panel>
   
   <panel order="1">
diff --git a/screening/dist/server/service.properties b/screening/dist/server/service.properties
index 4ec7a78752383a11189b2329dd691c86f187cda5..1da4dca69ad30d7a0c834ae025cd01ab33c48717 100644
--- a/screening/dist/server/service.properties
+++ b/screening/dist/server/service.properties
@@ -15,18 +15,18 @@ database.engine = postgresql
 database.create-from-scratch = false
 # For debugging set this value to true.
 database.script-single-step-mode = false
-database.url-host-part =
+database.url-host-part = //${DB.HOST}:${DB.PORT}/
 database.kind = screening
-database.owner =
-database.owner-password = 
-database.admin-user = 
-database.admin-password =
+database.owner = ${DB.USERNAME}
+database.owner-password = ${DB.PASSWORD}
+database.admin-user = ${DB.ADMIN.USERNAME}
+database.admin-password = ${DB.ADMIN.PASSWORD}
 
 data-source-provider = dss-based-data-source-provider
 
 dss-based-data-source-provider.data-store-servers = dss-screening
 dss-based-data-source-provider.dss-screening.database-driver = org.postgresql.Driver
-dss-based-data-source-provider.dss-screening.database-url = jdbc:postgresql://localhost/imaging_productive
+dss-based-data-source-provider.dss-screening.database-url = jdbc:postgresql://${DB.HOST}:${DB.PORT}/imaging_productive
 
 #crowd.service.host = crowd-bsse.ethz.ch
 #crowd.service.port = 8443
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/installer/izpack/DBConnectionValidator.java b/screening/source/java/ch/systemsx/cisd/openbis/installer/izpack/DBConnectionValidator.java
index 0cea55e2be5df8ebbfb1d8496b349cf32a14b5d2..5355fac46adc6cfeb515b0b59d1d59d96bfac9d7 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/installer/izpack/DBConnectionValidator.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/installer/izpack/DBConnectionValidator.java
@@ -60,12 +60,20 @@ public class DBConnectionValidator implements DataValidator
         String port = data.getVariable("DB.PORT");
         String username = data.getVariable("DB.USERNAME");
         String password = data.getVariable("DB.PASSWORD");
-        String dbname = "openbis_" + data.getVariable("DB.KIND");
+        String adminUsername = data.getVariable("DB.ADMIN.USERNAME");
+        String adminPassword = data.getVariable("DB.ADMIN.PASSWORD");
+        String dbname = data.getVariable("DB.NAME");
 
         String connectionString = "jdbc:postgresql://" + hostname + ":" + port + "/" + dbname;
 
-        boolean ok = testConnectionOK(connectionString, username, password);
-        return ok ? Status.OK : Status.ERROR;
+        if (testConnectionOK(connectionString, username, password))
+        {
+            if (testConnectionOK(connectionString, adminUsername, adminPassword))
+            {
+                return Status.OK;
+            }
+        }
+        return Status.ERROR;
     }
 
     private boolean testConnectionOK(String connectionString,