diff --git a/screening/etc/service.properties b/screening/etc/service.properties
index 0379825dd85f21986ecf2e5a5c0c9ececdb0487b..9b320447a542598eca13ab75269048ec454c8b78 100644
--- a/screening/etc/service.properties
+++ b/screening/etc/service.properties
@@ -1,343 +1,272 @@
 # Unique code of this Data Store Server. Not more than 40 characters.
-data-store-server-code = dss-screening
-
+data-store-server-code=dss-screening
 # host name of the machine on which the datastore server is running
-host-address = http://localhost
-
+host-address=http://localhost
 # parent directory of the store directory and all the dropboxes
-root-dir = targets
-
+root-dir=targets
 # The root directory of the data store
-storeroot-dir = ${root-dir}/store
-
+storeroot-dir=${root-dir}/store
 core-plugins-folder=../screening/source/core-plugins
-
-# The directory where the command queue file is located; defaults to storeroot-dir 
-commandqueue-dir =
-
+# The directory where the command queue file is located; defaults to storeroot-dir
+commandqueue-dir=
 # Port
-port = 8889
-use-ssl = false
-
+port=8889
+use-ssl=false
 # Session timeout in minutes
-session-timeout = 30
-
+session-timeout=30
 # Path to the keystore
-keystore.path = ../datastore_server/dist/etc/openBIS.keystore
+keystore.path=../server-original-data-store/dist/etc/openBIS.keystore
 # Password of the keystore
-keystore.password = changeit
+keystore.password=changeit
 # Key password of the keystore
-keystore.key-password = changeit
-
+keystore.key-password=changeit
 # The check interval (in seconds)
-check-interval = 5
-
+check-interval=5
 # The time-out for clean up work in the shutdown sequence (in seconds).
 # Note that the maximal time for the shutdown sequence to complete can be as large 
 # as twice this time.
 # Remark: On a network file system, it is not recommended to turn this value to something 
 # lower than 180.
-shutdown-timeout = 2
-
+shutdown-timeout=2
 # If free disk space goes below the value defined here, a notification email will be sent.
 # Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
 # specified or if value is negative, the system will not be watching.
-highwater-mark = 1000
-
-# If a data set is successfully registered it sends out an email to the registrator. 
+highwater-mark=1000
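+# A hypothetical illustration of the unit: highwater-mark=2097152 (2048 * 1024 KB)
+# would trigger the notification once free disk space drops below 2 GB.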
+# If a data set is successfully registered, an email is sent to the registrator.
 # If this property is not specified, no email is sent to the registrator. This property
 # does not affect the mails which are sent when the data set could not be registered.
-notify-successful-registration = false
-
+notify-successful-registration=false
 # The URL of the openBIS server
-server-url = ${host-address}:8888
-
+server-url=${host-address}:8888
 # The username to use when contacting the openBIS server
-username = etlserver
-
+username=etlserver
 # The password for the etlserver user who contacts the openBIS server
-password = etlserver_password
-
+password=etlserver_password
 # The base URL for Web client access to the data store server.
-download-url = ${host-address}:8889
-
+download-url=${host-address}:8889
 # SMTP properties (must start with 'mail' to be considered).
 # The current configuration saves the emails in the file system in the root directory 
-mail.smtp.host = file://${root-dir}/emails
+mail.smtp.host=file://${root-dir}/emails
 # mail.smtp.host = localhost
 # mail.from = datastore_server@ethz.ch
 # mail.smtp.user = 
 # mail.smtp.password = 
-
-dss-registration-log-dir = ${root-dir}/dss-registration-logs
-dss-recovery-state-dir = ${root-dir}/recovery-state
-
+dss-registration-log-dir=${root-dir}/dss-registration-logs
+dss-recovery-state-dir=${root-dir}/recovery-state
 # ---------------------------------------------------------------------------
 # (optional) archiver configuration
 # ---------------------------------------------------------------------------
-
 # Configuration of an archiver task. All properties are prefixed with 'archiver.'.
-
 # Archiver class specification (together with the list of packages this class belongs to).
 #archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
-
 # ---------------------------------------------------------------------------
 #                      INTERNAL CONFIGURATION
 # Do not change this part unless you are developing openBIS extensions.
 # ---------------------------------------------------------------------------
-
 # ---------------------------------------------------------------------------
 # screening database specification
 # ---------------------------------------------------------------------------
-
-data-sources = path-info-db
-
+data-sources=path-info-db
 # Data source for pathinfo database
-path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
-path-info-db.databaseEngineCode = postgresql
-path-info-db.basicDatabaseName = pathinfo
-path-info-db.databaseKind = dev
-path-info-db.scriptFolder = ../datastore_server/source/sql
-
-imaging-database.kind = dev
-screening-sql-root-folder = source/
-
+path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
+path-info-db.databaseEngineCode=postgresql
+path-info-db.basicDatabaseName=pathinfo
+path-info-db.databaseKind=dev
+path-info-db.scriptFolder=../server-original-data-store/source/sql
+imaging-database.kind=dev
+screening-sql-root-folder=source/
 # ---------------------------------------------------------------------------
 # reporting and processing plugins configuration
 # ---------------------------------------------------------------------------
-
 # Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
-reporting-plugins =  plate-image-analysis-graph, csv-viewer
-
-plate-image-analysis-graph.label = Image Analysis Graphs
-plate-image-analysis-graph.dataset-types = HCS_IMAGE_ANALYSIS_DATA, HCS_ANALYSIS_PER_GENE
-plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
-plate-image-analysis-graph.servlet-path = datastore_server_graph/
-plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties
-plate-image-analysis-graph.servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
-plate-image-analysis-graph.servlet.path = /${plate-image-analysis-graph.servlet-path}*
-plate-image-analysis-graph.servlet.properties-file = ${plate-image-analysis-graph.properties-file}
-
-
-csv-viewer.label = CSV View 
-csv-viewer.dataset-types = HCS_IMAGE_ANALYSIS_DATA
-csv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
-csv-viewer.separator = ,
-
-
+reporting-plugins=plate-image-analysis-graph, csv-viewer
+plate-image-analysis-graph.label=Image Analysis Graphs
+plate-image-analysis-graph.dataset-types=HCS_IMAGE_ANALYSIS_DATA, HCS_ANALYSIS_PER_GENE
+plate-image-analysis-graph.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
+plate-image-analysis-graph.servlet-path=datastore_server_graph/
+plate-image-analysis-graph.properties-file=etc/tabular-data-graph.properties
+plate-image-analysis-graph.servlet.class=ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
+plate-image-analysis-graph.servlet.path=/${plate-image-analysis-graph.servlet-path}*
+plate-image-analysis-graph.servlet.properties-file=${plate-image-analysis-graph.properties-file}
+csv-viewer.label=CSV View 
+csv-viewer.dataset-types=HCS_IMAGE_ANALYSIS_DATA
+csv-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
+csv-viewer.separator=,
 # ---------------------------------------------------------------------------
-
-maintenance-plugins = path-info-deletion, post-registration
-
+maintenance-plugins=path-info-deletion, post-registration
 # Maintenance task (performed only once) to create paths of existing data sets in pathinfo database
-path-info-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
-path-info-feeding.execute-only-once = true
-
+path-info-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
+path-info-feeding.execute-only-once=true
 # Maintenance task for deleting entries in pathinfo database after deletion of data sets
-path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
-path-info-deletion.interval = 120
-path-info-deletion.data-source = path-info-db
-path-info-deletion.data-set-table-name = data_sets
-path-info-deletion.data-set-perm-id = CODE
-
-# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database  
-post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
-post-registration.interval = 30
-post-registration.cleanup-tasks-folder = targets/cleanup-tasks
+path-info-deletion.class=ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
+path-info-deletion.interval=120
+path-info-deletion.data-source=path-info-db
+path-info-deletion.data-set-table-name=data_sets
+path-info-deletion.data-set-perm-id=CODE
+# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database
+post-registration.class=ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
+post-registration.interval=30
+post-registration.cleanup-tasks-folder=targets/cleanup-tasks
 # The following date should be the day when the DSS is started up for the first time with PathInfoDatabaseFeedingTask.
 # After PathInfoDatabaseFeedingTask has been performed, it can be removed and the following line can be deleted.
 #post-registration.ignore-data-sets-before-date = 2011-04-18
-post-registration.last-seen-data-set-file = targets/last-seen-data-set
-post-registration.post-registration-tasks = pathinfo-feeding
-post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
-
+post-registration.last-seen-data-set-file=targets/last-seen-data-set
+post-registration.post-registration-tasks=pathinfo-feeding
+post-registration.pathinfo-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
 # ---------------------------------------------------------------------------
 #                      DROPBOXES CONFIGURATION 
 # ---------------------------------------------------------------------------
-
-incoming-root-dir = ${root-dir}
-
-# True if incoming directories should be created on server startup if they don't exist. 
+incoming-root-dir=${root-dir}
+# True if incoming directories should be created on server startup if they don't exist.
 # Default - false (server will fail at startup if one of incoming directories doesn't exist). 
-incoming-dir-create = true
-
-# Globally used separator character which separates entities in a data set file name 
-data-set-file-name-entity-separator = _
-
-# The period of no write access that needs to pass before an incoming data item is considered 
+incoming-dir-create=true
+# Globally used separator character which separates entities in a data set file name
+data-set-file-name-entity-separator=_
+# The period of no write access that needs to pass before an incoming data item is considered
 # complete and ready to be processed (in seconds) [default: 300]. 
 # Valid only when the auto-detection method is used to determine whether incoming data is ready to be processed.
-quiet-period = 3
-
+quiet-period=3
 # code of the default space in openBIS to which the data will be imported
-import-space-code = TEST
-
+import-space-code=TEST
 # Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
 # E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
 inputs=merged-channels-images, image-analysis-results, genedata-merged-channels-images, genedata-image-analysis-results, timepoint-images
-
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
-merged-channels-images.incoming-data-completeness-condition = auto-detection
-
+merged-channels-images.incoming-dir=${incoming-root-dir}/incoming-images-merged-channels
+merged-channels-images.incoming-data-completeness-condition=auto-detection
 # The extractor class to use for code extraction
-merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-merged-channels-images.data-set-info-extractor.entity-separator = .
-merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
-merged-channels-images.data-set-info-extractor.index-of-data-producer-code = 
+merged-channels-images.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+merged-channels-images.data-set-info-extractor.entity-separator=.
+merged-channels-images.data-set-info-extractor.index-of-sample-code=0
+merged-channels-images.data-set-info-extractor.index-of-data-producer-code=
 merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
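+# Hypothetical example of the extraction configured above: with entity-separator '.' and
+# index-of-sample-code 0, an incoming item named 'PLATE1.batch2' yields the sample code PLATE1.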
-
 # The extractor class to use for type extraction
-merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-merged-channels-images.type-extractor.file-format-type = JPG
-merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
-merged-channels-images.type-extractor.data-set-type = HCS_IMAGE
-merged-channels-images.type-extractor.is-measured = true
-
-merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+merged-channels-images.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+merged-channels-images.type-extractor.file-format-type=JPG
+merged-channels-images.type-extractor.locator-type=RELATIVE_LOCATION
+merged-channels-images.type-extractor.data-set-type=HCS_IMAGE
+merged-channels-images.type-extractor.is-measured=true
+merged-channels-images.storage-processor=ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
 # Should the thumbnails be generated? 
 # It slows down the dataset registration, but increases the performance when the user wants to see the image. 
 # Can be 'true' or 'false', 'false' is the default value
-merged-channels-images.storage-processor.generate-thumbnails = false
+merged-channels-images.storage-processor.generate-thumbnails=false
 # Thumbnails size in pixels
 # merged-channels-images.storage-processor.thumbnail-max-width = 300
 # merged-channels-images.storage-processor.thumbnail-max-height = 200
 # DEPRECATED: use 'channel-codes' and 'channel-labels' instead
 #merged-channels-images.storage-processor.channel-names = gfp, dapi
 # Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_]. Number and order of entries must be consistent with 'channel-labels'.
-merged-channels-images.storage-processor.channel-codes = GFP, DAPI
+merged-channels-images.storage-processor.channel-codes=GFP, DAPI
 # Labels of the channels in which images have been acquired. Number and order of entries must be consistent with 'channel-codes'.
-merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
+merged-channels-images.storage-processor.channel-labels=Gfp, Dapi
 # Format: [width]x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
-merged-channels-images.storage-processor.well_geometry = 3x3
+merged-channels-images.storage-processor.well_geometry=3x3
 # implementation of the IHCSImageFileExtractor interface which maps images to their location on the plate and to a particular channel
 # Here: the extractor requires that each image name adhere to the schema:
 #     <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
-merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+merged-channels-images.storage-processor.file-extractor=ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
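+# A hypothetical file name conforming to the schema above: scan_PLATE1_A01_3_GFP.jpg
+# (plate PLATE1, well A01, tile 3, channel GFP)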
 # specification of the imaging db
-merged-channels-images.storage-processor.data-source = imaging-db
+merged-channels-images.storage-processor.data-source=imaging-db
 # Optional comma separated list of color components. 
 # Available values: RED, GREEN or BLUE. 
 # If specified then the channels are extracted from the color components and override 'file-extractor' results.
-merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
-
-
+merged-channels-images.storage-processor.extract-single-image-channels=GREEN, BLUE
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis
-image-analysis-results.incoming-data-completeness-condition = auto-detection
-
+image-analysis-results.incoming-dir=${incoming-root-dir}/incoming-analysis
+image-analysis-results.incoming-data-completeness-condition=auto-detection
 # The extractor class to use for code extraction
-image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+image-analysis-results.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-image-analysis-results.data-set-info-extractor.entity-separator = .
-image-analysis-results.data-set-info-extractor.index-of-sample-code = 0
-image-analysis-results.data-set-info-extractor.space-code = ${import-space-code}
-
+image-analysis-results.data-set-info-extractor.entity-separator=.
+image-analysis-results.data-set-info-extractor.index-of-sample-code=0
+image-analysis-results.data-set-info-extractor.space-code=${import-space-code}
 # The extractor class to use for type extraction
-image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-image-analysis-results.type-extractor.file-format-type = CSV
-image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION
-image-analysis-results.type-extractor.data-set-type = HCS_IMAGE_ANALYSIS_DATA
-image-analysis-results.type-extractor.is-measured = false
-
+image-analysis-results.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+image-analysis-results.type-extractor.file-format-type=CSV
+image-analysis-results.type-extractor.locator-type=RELATIVE_LOCATION
+image-analysis-results.type-extractor.data-set-type=HCS_IMAGE_ANALYSIS_DATA
+image-analysis-results.type-extractor.is-measured=false
 # The storage processor (IStorageProcessor implementation)
-image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
-image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
-image-analysis-results.storage-processor.data-source = imaging-db
+image-analysis-results.storage-processor=ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
+image-analysis-results.storage-processor.processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+image-analysis-results.storage-processor.data-source=imaging-db
 # semi-colon (;) by default 
-image-analysis-results.storage-processor.separator = ,
-image-analysis-results.storage-processor.ignore-comments = true
-image-analysis-results.storage-processor.well-name-row = row
-image-analysis-results.storage-processor.well-name-col = col
-image-analysis-results.storage-processor.well-name-col-is-alphanum = true
-
+image-analysis-results.storage-processor.separator=,
+image-analysis-results.storage-processor.ignore-comments=true
+image-analysis-results.storage-processor.well-name-row=row
+image-analysis-results.storage-processor.well-name-col=col
+image-analysis-results.storage-processor.well-name-col-is-alphanum=true
 # ---------------------------------------------------------------------------
 # GENEDATA formats
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-genedata-merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-genedata
-genedata-merged-channels-images.incoming-data-completeness-condition = auto-detection
-
+genedata-merged-channels-images.incoming-dir=${incoming-root-dir}/incoming-images-genedata
+genedata-merged-channels-images.incoming-data-completeness-condition=auto-detection
 # The extractor class to use for code extraction
-genedata-merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.openbis.dss.etl.genedata.DataSetInfoExtractorForDataAcquisition
+genedata-merged-channels-images.data-set-info-extractor=ch.systemsx.cisd.openbis.dss.etl.genedata.DataSetInfoExtractorForDataAcquisition
 # Separator used to extract the barcode in the data set file name
-genedata-merged-channels-images.data-set-info-extractor.entity-separator = .
-genedata-merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
-genedata-merged-channels-images.data-set-info-extractor.index-of-data-producer-code = 
+genedata-merged-channels-images.data-set-info-extractor.entity-separator=.
+genedata-merged-channels-images.data-set-info-extractor.index-of-sample-code=0
+genedata-merged-channels-images.data-set-info-extractor.index-of-data-producer-code=
 genedata-merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}
-
 # The extractor class to use for type extraction
-genedata-merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-genedata-merged-channels-images.type-extractor.file-format-type = JPG
-genedata-merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
-genedata-merged-channels-images.type-extractor.data-set-type = HCS_IMAGE
-genedata-merged-channels-images.type-extractor.is-measured = true
-
-genedata-merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
-genedata-merged-channels-images.storage-processor.well_geometry = 1x1
-genedata-merged-channels-images.storage-processor.channel-codes = DAPI, GFP
-genedata-merged-channels-images.storage-processor.channel-labels = Dapi, Gfp
+genedata-merged-channels-images.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+genedata-merged-channels-images.type-extractor.file-format-type=JPG
+genedata-merged-channels-images.type-extractor.locator-type=RELATIVE_LOCATION
+genedata-merged-channels-images.type-extractor.data-set-type=HCS_IMAGE
+genedata-merged-channels-images.type-extractor.is-measured=true
+genedata-merged-channels-images.storage-processor=ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+genedata-merged-channels-images.storage-processor.well_geometry=1x1
+genedata-merged-channels-images.storage-processor.channel-codes=DAPI, GFP
+genedata-merged-channels-images.storage-processor.channel-labels=Dapi, Gfp
 # Available values: RED, BLUE or GREEN
-genedata-merged-channels-images.storage-processor.extract-single-image-channels = BLUE, GREEN
-genedata-merged-channels-images.storage-processor.deprecated-file-extractor = ch.systemsx.cisd.openbis.dss.etl.genedata.HCSImageFileExtractor
-genedata-merged-channels-images.storage-processor.data-source = imaging-db
-
+genedata-merged-channels-images.storage-processor.extract-single-image-channels=BLUE, GREEN
+genedata-merged-channels-images.storage-processor.deprecated-file-extractor=ch.systemsx.cisd.openbis.dss.etl.genedata.HCSImageFileExtractor
+genedata-merged-channels-images.storage-processor.data-source=imaging-db
 #  --------------------------------------------------------------
-
 # The directory to watch for incoming data.
-genedata-image-analysis-results.incoming-dir = ${incoming-root-dir}/incoming-analysis-genedata
-genedata-image-analysis-results.incoming-data-completeness-condition = auto-detection
-
+genedata-image-analysis-results.incoming-dir=${incoming-root-dir}/incoming-analysis-genedata
+genedata-image-analysis-results.incoming-data-completeness-condition=auto-detection
 # The extractor class to use for code extraction
-genedata-image-analysis-results.data-set-info-extractor = ch.systemsx.cisd.openbis.dss.etl.genedata.DataSetInfoExtractorForImageAnalysis
+genedata-image-analysis-results.data-set-info-extractor=ch.systemsx.cisd.openbis.dss.etl.genedata.DataSetInfoExtractorForImageAnalysis
 # Separator used to extract the barcode in the data set file name
-genedata-image-analysis-results.data-set-info-extractor.entity-separator = .
-genedata-image-analysis-results.data-set-info-extractor.index-of-sample-code = 0
-genedata-image-analysis-results.data-set-info-extractor.space-code = ${import-space-code}
-
+genedata-image-analysis-results.data-set-info-extractor.entity-separator=.
+genedata-image-analysis-results.data-set-info-extractor.index-of-sample-code=0
+genedata-image-analysis-results.data-set-info-extractor.space-code=${import-space-code}
 # The extractor class to use for type extraction
-genedata-image-analysis-results.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-genedata-image-analysis-results.type-extractor.file-format-type = CSV
-genedata-image-analysis-results.type-extractor.locator-type = RELATIVE_LOCATION
-genedata-image-analysis-results.type-extractor.data-set-type = HCS_IMAGE_ANALYSIS_DATA
-genedata-image-analysis-results.type-extractor.is-measured = false
-
+genedata-image-analysis-results.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+genedata-image-analysis-results.type-extractor.file-format-type=CSV
+genedata-image-analysis-results.type-extractor.locator-type=RELATIVE_LOCATION
+genedata-image-analysis-results.type-extractor.data-set-type=HCS_IMAGE_ANALYSIS_DATA
+genedata-image-analysis-results.type-extractor.is-measured=false
 # The storage processor (IStorageProcessor implementation)
-genedata-image-analysis-results.storage-processor = ch.systemsx.cisd.openbis.dss.etl.genedata.FeatureStorageProcessor
-genedata-image-analysis-results.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
-genedata-image-analysis-results.storage-processor.data-source = imaging-db
-
+genedata-image-analysis-results.storage-processor=ch.systemsx.cisd.openbis.dss.etl.genedata.FeatureStorageProcessor
+genedata-image-analysis-results.storage-processor.processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+genedata-image-analysis-results.storage-processor.data-source=imaging-db
 # ---------------------------------------------------------------------------
 # Timepoint Images
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-timepoint-images.incoming-dir = ${incoming-root-dir}/incoming-images-timepoints
-timepoint-images.incoming-data-completeness-condition = auto-detection
-
+timepoint-images.incoming-dir=${incoming-root-dir}/incoming-images-timepoints
+timepoint-images.incoming-data-completeness-condition=auto-detection
 # The extractor class to use for code extraction
-timepoint-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
-timepoint-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-timepoint-images.data-set-info-extractor.index-of-sample-code = 0
-timepoint-images.data-set-info-extractor.space-code = ${import-space-code}
-
+timepoint-images.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+timepoint-images.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+timepoint-images.data-set-info-extractor.index-of-sample-code=0
+timepoint-images.data-set-info-extractor.space-code=${import-space-code}
 # The extractor class to use for type extraction
-timepoint-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-timepoint-images.type-extractor.file-format-type = TIFF
-timepoint-images.type-extractor.locator-type = RELATIVE_LOCATION
-timepoint-images.type-extractor.data-set-type = HCS_IMAGE
-timepoint-images.type-extractor.is-measured = true
-
-timepoint-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
-timepoint-images.storage-processor.generate-thumbnails = false
-timepoint-images.storage-processor.channel-names = dia, epi
-timepoint-images.storage-processor.well_geometry = 1x1
-timepoint-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.dynamix.HCSImageFileExtractor
-timepoint-images.storage-processor.data-source = imaging-db
-
+timepoint-images.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+timepoint-images.type-extractor.file-format-type=TIFF
+timepoint-images.type-extractor.locator-type=RELATIVE_LOCATION
+timepoint-images.type-extractor.data-set-type=HCS_IMAGE
+timepoint-images.type-extractor.is-measured=true
+timepoint-images.storage-processor=ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
+timepoint-images.storage-processor.generate-thumbnails=false
+timepoint-images.storage-processor.channel-names=dia, epi
+timepoint-images.storage-processor.well_geometry=1x1
+timepoint-images.storage-processor.file-extractor=ch.systemsx.cisd.openbis.dss.etl.dynamix.HCSImageFileExtractor
+timepoint-images.storage-processor.data-source=imaging-db
 jython-version=2.7
\ No newline at end of file
diff --git a/server-original-data-store/etc/service.properties b/server-original-data-store/etc/service.properties
index 3e8cfc2b4c7d4054472ea3b55b8ff27db7adb8ea..933f5ded89ff16e61122f3e74c7de16bd050893e 100644
--- a/server-original-data-store/etc/service.properties
+++ b/server-original-data-store/etc/service.properties
@@ -1,148 +1,112 @@
 # Unique code of this Data Store Server. Not more than 40 characters.
-data-store-server-code = standard
-
-root-dir = targets
-
+data-store-server-code=standard
+root-dir=targets
 # The root directory of the data store
-storeroot-dir = ${root-dir}/store
-
+storeroot-dir=${root-dir}/store
 # The directory for incoming files over rpc
-rpc-incoming-dir = ${root-dir}/incoming-rpc
-
-core-plugins-folder = source/core-plugins
-
-# The directory where the command queue file is located; defaults to storeroot-dir 
-commandqueue-dir =
-
+rpc-incoming-dir=${root-dir}/incoming-rpc
+core-plugins-folder=source/core-plugins
+# The directory where the command queue file is located; defaults to storeroot-dir
+commandqueue-dir=
 # Port
-port = 8889
-
+port=8889
 # Session timeout in minutes
-session-timeout = 30
-
+session-timeout=30
 # Set to 'false' for development/testing without a deployed server. In this mode the datastore will not
 # use SSL when connecting to openBIS. Otherwise all 'keystore' properties need to be set for the SSL
 # connection (the default when the use-ssl property is not set, so there is no need to specify it on production servers).
-use-ssl = false
-
+use-ssl=false
 # Path, password and key password for SSL connection
 #keystore.path = dist/etc/openBIS.keystore
 #keystore.password = changeit
 #keystore.key-password = changeit
-
 # The check interval (in seconds)
-check-interval = 5
-
+check-interval=5
 # The time-out for clean up work in the shutdown sequence (in seconds).
 # Note that the maximal time for the shutdown sequence to complete can be as large 
 # as twice this time.
-shutdown-timeout = 2
-
+shutdown-timeout=2
 # The minimum time (in seconds) that the data stream stays available after the user has requested
 # the data stream URL. If not specified, the default value (20 seconds) will be used.
 # data-stream-timeout = 20
-
 # If free disk space goes below the value defined here, a notification email will be sent.
 # Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
 # specified or if value is negative, the system will not be watching.
-highwater-mark = 1048576
-
-# If a data set is successfully registered it sends out an email to the registrator. 
+highwater-mark=1048576
+# If a data set is successfully registered, an email is sent to the registrator.
 # If this property is not specified, no email is sent to the registrator. This property
 # does not affect the mails which are sent when the data set could not be registered.
-notify-successful-registration = false
-
+notify-successful-registration=false
 # The URL of the openBIS server
-server-url = http://localhost:8888
-
+server-url=http://localhost:8888
 # Time out for accessing openBIS server. Default value is 5 minutes.
-server-timeout-in-minutes = 10
-
+server-timeout-in-minutes=10
 # The location of the jars accessible to web start clients
-webstart-jar-path = targets/dist/datastore_server/lib
-
+webstart-jar-path=targets/dist/datastore_server/lib
 # The username to use when contacting the openBIS server
-username = etlserver
-
+username=etlserver
 # The password to use when contacting the openBIS server
-password = doesnotmatter
-
+password=doesnotmatter
 #
 # CIFEX configuration: Only needed if data export should work without the user having to type in 
 # his password. Note that in order for this to work the CIFEX server needs to be configured to 
 # allow calling setSessionUser() from the IP address of this data store server, see configuration 
 # option allowed-ips-for-set-session-user in CIFEX's service.properties    
 #
-
-# The admin username to use when contacting the CIFEX server 
-cifex-admin-username =
-
-# The admin password to use when contacting the CIFEX server 
-cifex-admin-password =
-
+# The admin username to use when contacting the CIFEX server
+cifex-admin-username=
+# The admin password to use when contacting the CIFEX server
+cifex-admin-password=
 # The base URL for Web client access.
-download-url = http://localhost:8889
-
+download-url=http://localhost:8889
 # SMTP properties (must start with 'mail' to be considered).
-mail.smtp.host = file://${root-dir}/email
-mail.from = datastore_server@localhost
-mail.smtp.user = 
+mail.smtp.host=file://${root-dir}/email
+mail.from=datastore_server@localhost
+mail.smtp.user=
 mail.smtp.password = 
 # If this property is set a test e-mail will be sent to the specified address after DSS successfully started-up.
-mail.test.address = test@localhost
+mail.test.address=test@localhost
 # Email addresses of people to get notifications about problems in dataset registrations
-mail.addresses.dropbox-errors = admin1@localhost,admin2@localhost
-
+mail.addresses.dropbox-errors=admin1@localhost,admin2@localhost
 # ---------------------------------------------------------------------------
 # Data sources
-data-sources = path-info-db 
-
+data-sources=path-info-db 
 # Data source for pathinfo database
-path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
-path-info-db.databaseEngineCode = postgresql
-path-info-db.basicDatabaseName = pathinfo
-path-info-db.databaseKind = dev
-path-info-db.scriptFolder = ../datastore_server/source/sql
-
-
+path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
+path-info-db.databaseEngineCode=postgresql
+path-info-db.basicDatabaseName=pathinfo
+path-info-db.databaseKind=dev
+path-info-db.scriptFolder=../server-original-data-store/source/sql
 # ---------------------------------------------------------------------------
-
 # Maximum number of retries if renaming failed.
 # renaming.failure.max-retries = 12
-
 # The number of milliseconds to wait before retrying to execute the renaming process.
 # renaming.failure.millis-to-sleep = 5000
-
-# Globally used separator character which separates entities in a data set file name 
-data-set-file-name-entity-separator = _
-
-# The period of no write access that needs to pass before an incoming data item is considered 
+# Globally used separator character which separates entities in a data set file name
+data-set-file-name-entity-separator=_
+# The period of no write access that needs to pass before an incoming data item is considered
 # complete and ready to be processed (in seconds) [default: 300]. 
 # Valid only when the auto-detection method is used to determine whether incoming data is ready to be processed.
-quiet-period = 3
-
+quiet-period=3
 # ---------------------------------------------------------------------------
 # reporting and processing plugins configuration
 # ---------------------------------------------------------------------------
-
 # Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
 # If a name has the 'default-' prefix, it will be used by default in the data set Data View.
-reporting-plugins = demo-reporter, tsv-viewer, csv-viewer, hcs-viewer, hcs-viewer-tiff, default-hcs-viewer
-
+reporting-plugins=demo-reporter, tsv-viewer, csv-viewer, hcs-viewer, hcs-viewer-tiff, default-hcs-viewer
 # Label of the plugin which will be shown for the users.
-demo-reporter.label = Show Dataset Size
+demo-reporter.label=Show Dataset Size
 # Comma separated list of dataset type codes which can be handled by this plugin.
 # Wildcards are allowed, but the DSS server may need to be restarted if a new data set type is added to openBIS
-demo-reporter.dataset-types = .*
+demo-reporter.dataset-types=.*
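+# A narrower, hypothetical pattern such as 'demo-reporter.dataset-types=HCS_.*' would
+# limit the plugin to data set types whose code starts with HCS_.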
 # Plugin class specification (together with the list of packages this class belongs to).
-demo-reporter.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
+demo-reporter.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
 # The property file. Its content will be passed as a parameter to the plugin.
-demo-reporter.properties-file = 
-
+demo-reporter.properties-file=
 tsv-viewer.label = TSV View 
-tsv-viewer.dataset-types = TSV
-tsv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
-tsv-viewer.properties-file =
+tsv-viewer.dataset-types=TSV
+tsv-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
+tsv-viewer.properties-file=
 # Optional properties:
 # - separator of values in the file, significant for TSV and CSV files; default: tab
 #tsv-viewer.separator = ;
@@ -150,115 +114,94 @@ tsv-viewer.properties-file =
 #tsv-viewer.ignore-comments = false
 # - Excel sheet name or index (0 based) used for the Excel file (.xls or .xlsx); default: 0 (first sheet)
 #tsv-viewer.excel-sheet = example_sheet_name
-
-csv-viewer.label = CSV View 
-csv-viewer.dataset-types = CSV
-csv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
-csv-viewer.separator = ,
-
-hcs-viewer.label = HCS PNG 	
-hcs-viewer.dataset-types = HCS_IMAGE
-hcs-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-hcs-viewer.download-url = ${download-url}
-hcs-viewer.data-set-regex = .*/PNG/.*\.jpg
-hcs-viewer.data-set-path = original
-
-hcs-viewer-tiff.label = HCS TIFF 	
-hcs-viewer-tiff.dataset-types = HCS_IMAGE
-hcs-viewer-tiff.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-hcs-viewer-tiff.download-url = ${download-url}
-hcs-viewer-tiff.data-set-regex = .*/TIFF/.*\.jpg
-hcs-viewer-tiff.data-set-path = original
-
-default-hcs-viewer.label = Default HCS View 	
-default-hcs-viewer.dataset-types = HCS_IMAGE
-default-hcs-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-default-hcs-viewer.download-url = ${download-url}
-default-hcs-viewer.data-set-regex = .*\.jpg
-
-
+csv-viewer.label=CSV View 
+csv-viewer.dataset-types=CSV
+csv-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
+csv-viewer.separator=,
+hcs-viewer.label=HCS PNG 	
+hcs-viewer.dataset-types=HCS_IMAGE
+hcs-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+hcs-viewer.download-url=${download-url}
+hcs-viewer.data-set-regex=.*/PNG/.*\.jpg
+hcs-viewer.data-set-path=original
+hcs-viewer-tiff.label=HCS TIFF 	
+hcs-viewer-tiff.dataset-types=HCS_IMAGE
+hcs-viewer-tiff.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+hcs-viewer-tiff.download-url=${download-url}
+hcs-viewer-tiff.data-set-regex=.*/TIFF/.*\.jpg
+hcs-viewer-tiff.data-set-path=original
+default-hcs-viewer.label=Default HCS View 	
+default-hcs-viewer.dataset-types=HCS_IMAGE
+default-hcs-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+default-hcs-viewer.download-url=${download-url}
+default-hcs-viewer.data-set-regex=.*\.jpg
 # Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
-processing-plugins = demo-processor
-
-# The configuration of the processing plugin is the same as the reporting plugins configuration. 
-demo-processor.label = Demo Processing
+processing-plugins=demo-processor
+# The configuration of the processing plugin is the same as the reporting plugins configuration.
+demo-processor.label=Demo Processing
 # Wildcards are allowed, but the DSS server may need to be restarted if a new data set type is added to openBIS
-demo-processor.dataset-types = HCS_.*, CONTAINER_.*
-demo-processor.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoProcessingPlugin
-demo-processor.properties-file = 
-
+demo-processor.dataset-types=HCS_.*, CONTAINER_.*
+demo-processor.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoProcessingPlugin
+demo-processor.properties-file=
 # Data set validators used to accept or reject data sets to be registered.
 # Comma separated list of validator definitions.
-data-set-validators = validator
-
+data-set-validators=validator
 # Definition of data set validator 'validator'
-validator.data-set-type = HCS_IMAGE
-validator.path-patterns = **/*.txt
-validator.columns = id, description, size
-validator.id.header-pattern = ID
-validator.id.mandatory = true
-validator.id.order = 1
-validator.id.value-type = unique
-validator.description.header-pattern = Description
-validator.description.value-type = string
-validator.description.value-pattern = .{0,100}
-validator.size.header-pattern = A[0-9]+
-validator.size.can-define-multiple-columns = true
-validator.size.allow-empty-values = true
-validator.size.value-type = numeric
-validator.site.value-range = [0,Infinity)
-
+validator.data-set-type=HCS_IMAGE
+validator.path-patterns=**/*.txt
+validator.columns=id, description, size
+validator.id.header-pattern=ID
+validator.id.mandatory=true
+validator.id.order=1
+validator.id.value-type=unique
+validator.description.header-pattern=Description
+validator.description.value-type=string
+validator.description.value-pattern=.{0,100}
+validator.size.header-pattern=A[0-9]+
+validator.size.can-define-multiple-columns=true
+validator.size.allow-empty-values=true
+validator.size.value-type=numeric
+validator.size.value-range=[0,Infinity)
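+# Hypothetical illustration of the 'size' definition above: columns headed A1, A7 or A42
+# all match the header-pattern A[0-9]+ and are validated as numeric values in [0,Infinity).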
 # ---------------------------------------------------------------------------
-
 # Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
 # E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
 inputs=main-thread, tsv-thread, csv-thread, simple-thread, hdf5-thread, dss-system-test-thread
-
-dss-rpc.put.CUSTOM-IMPORT = test-custom-import
-
-# True if incoming directories should be created on server startup if they don't exist. 
+dss-rpc.put.CUSTOM-IMPORT=test-custom-import
+# True if incoming directories should be created on server startup if they don't exist.
 # Default - false (server will fail at startup if one of incoming directories doesn't exist). 
-incoming-dir-create = true
-
+incoming-dir-create=true
 # ---------------------------------------------------------------------------
 # Dataset uploader 'main-thread' configuration
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-main-thread.incoming-dir = ${root-dir}/incoming
-
-# Specifies what should happen if an error occurs during dataset processing. 
+main-thread.incoming-dir=${root-dir}/incoming
+# Specifies what should happen if an error occurs during dataset processing.
 # By default this flag is set to false and the user has to modify the 'faulty paths file' 
 # each time the faulty dataset should be processed again.
 # Set this flag to true if the processing should be repeated after some time without manual intervention.
 # Note that this can increase the server load.
 # main-thread.reprocess-faulty-datasets = false
-
 # Determines when the incoming data should be considered complete and ready to be processed.
 # Allowed values: 
 #  - auto-detection - when no write access has been detected for the specified 'quiet-period'
 #  - marker-file    - when an appropriate marker file for the data exists. 
 # The default value is 'marker-file'.
-main-thread.incoming-data-completeness-condition = auto-detection
-
+main-thread.incoming-data-completeness-condition=auto-detection
 # The store format that should be applied in the incoming directory.
-main-thread.incoming-dir.format = 
-
+main-thread.incoming-dir.format=
 # The extractor plugin class to use for code extraction
-main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+main-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-main-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
+main-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
 #main-thread.data-set-info-extractor.space-code = CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-main-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-main-thread.type-extractor.file-format-type = TIFF
-main-thread.type-extractor.locator-type = RELATIVE_LOCATION
-main-thread.type-extractor.data-set-type = HCS_IMAGE
-
+main-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+main-thread.type-extractor.file-format-type=TIFF
+main-thread.type-extractor.locator-type=RELATIVE_LOCATION
+main-thread.type-extractor.data-set-type=HCS_IMAGE
 # The storage processor (IStorageProcessor implementation)
-main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+main-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 #main-thread.storage-processor.version = 1.1
 #main-thread.storage-processor.sampleTypeCode = CELL_PLATE
 #main-thread.storage-processor.sampleTypeDescription = Screening Plate
@@ -267,102 +210,90 @@ main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcess
 #main-thread.storage-processor.contains_original_data = TRUE
 #main-thread.storage-processor.well_geometry = 3x3
 #main-thread.storage-processor.file-extractor = ch.systemsx.cisd.etlserver.imsb.HCSImageFileExtractor
-
-# Path to the script that will be executed after successful data set registration. 
+# Path to the script that will be executed after successful data set registration.
 # The script will be called with two parameters: <data-set-code> and <absolute-data-set-path> (in the data store).
 # main-thread.post-registration-script = /example/scripts/my-script.sh
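+# Hypothetical invocation: /example/scripts/my-script.sh <data-set-code> <absolute-data-set-path>,
+# e.g. my-script.sh 20240101120000000-42 /<store-root>/<shard>/20240101120000000-42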
-
 # The directory to watch for incoming data.
-tsv-thread.incoming-dir = ${root-dir}/incoming-tsv
-tsv-thread.incoming-data-completeness-condition = auto-detection
-tsv-thread.strip-file-extension = true
-tsv-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+tsv-thread.incoming-dir=${root-dir}/incoming-tsv
+tsv-thread.incoming-data-completeness-condition=auto-detection
+tsv-thread.strip-file-extension=true
+tsv-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-tsv-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+tsv-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-tsv-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-tsv-thread.data-set-info-extractor.strip-file-extension = true
+tsv-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+tsv-thread.data-set-info-extractor.strip-file-extension=true
 #main-thread.data-set-info-extractor.space-code = CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-tsv-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-tsv-thread.type-extractor.file-format-type = TSV
-tsv-thread.type-extractor.locator-type = RELATIVE_LOCATION
-tsv-thread.type-extractor.data-set-type = TSV
-
-csv-thread.incoming-dir = ${root-dir}/incoming-csv
-csv-thread.incoming-data-completeness-condition = auto-detection
-csv-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+tsv-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+tsv-thread.type-extractor.file-format-type=TSV
+tsv-thread.type-extractor.locator-type=RELATIVE_LOCATION
+tsv-thread.type-extractor.data-set-type=TSV
+csv-thread.incoming-dir=${root-dir}/incoming-csv
+csv-thread.incoming-data-completeness-condition=auto-detection
+csv-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-csv-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+csv-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-csv-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-csv-thread.data-set-info-extractor.strip-file-extension = true
-csv-thread.data-set-info-extractor.space-code = CISD
+csv-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+csv-thread.data-set-info-extractor.strip-file-extension=true
+csv-thread.data-set-info-extractor.space-code=CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-csv-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-csv-thread.type-extractor.file-format-type = CSV
-csv-thread.type-extractor.locator-type = RELATIVE_LOCATION
-csv-thread.type-extractor.data-set-type = CSV
-
+csv-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+csv-thread.type-extractor.file-format-type=CSV
+csv-thread.type-extractor.locator-type=RELATIVE_LOCATION
+csv-thread.type-extractor.data-set-type=CSV
 # The directory to watch for incoming data.
-simple-thread.incoming-dir = ${root-dir}/incoming-simple
-simple-thread.incoming-data-completeness-condition = auto-detection
-simple-thread.strip-file-extension = true
-simple-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+simple-thread.incoming-dir=${root-dir}/incoming-simple
+simple-thread.incoming-data-completeness-condition=auto-detection
+simple-thread.strip-file-extension=true
+simple-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-simple-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+simple-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-simple-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-simple-thread.data-set-info-extractor.strip-file-extension = true
-simple-thread.data-set-info-extractor.space-code = CISD
-
-simple-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-simple-thread.type-extractor.file-format-type = TIFF
-simple-thread.type-extractor.locator-type = RELATIVE_LOCATION
-simple-thread.type-extractor.data-set-type = HCS_IMAGE
-
+simple-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+simple-thread.data-set-info-extractor.strip-file-extension=true
+simple-thread.data-set-info-extractor.space-code=CISD
+simple-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+simple-thread.type-extractor.file-format-type=TIFF
+simple-thread.type-extractor.locator-type=RELATIVE_LOCATION
+simple-thread.type-extractor.data-set-type=HCS_IMAGE
 # The directory to watch for incoming data.
-hdf5-thread.incoming-dir = ${root-dir}/incoming-hdf5
-hdf5-thread.incoming-data-completeness-condition = auto-detection
-hdf5-thread.strip-file-extension = true
-hdf5-thread.storage-processor = ch.systemsx.cisd.etlserver.hdf5.HDF5StorageProcessor
+hdf5-thread.incoming-dir=${root-dir}/incoming-hdf5
+hdf5-thread.incoming-data-completeness-condition=auto-detection
+hdf5-thread.strip-file-extension=true
+hdf5-thread.storage-processor=ch.systemsx.cisd.etlserver.hdf5.HDF5StorageProcessor
 # The extractor plugin class to use for code extraction
-hdf5-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+hdf5-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-hdf5-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-hdf5-thread.data-set-info-extractor.strip-file-extension = true
-hdf5-thread.data-set-info-extractor.space-code = SPACE
-
-hdf5-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-hdf5-thread.type-extractor.file-format-type = HDF5
-hdf5-thread.type-extractor.locator-type = RELATIVE_LOCATION
-hdf5-thread.type-extractor.data-set-type = HCS_IMAGE
-
-dss-system-test-thread.incoming-dir = ${root-dir}/incoming-simple
-dss-system-test-thread.incoming-data-completeness-condition = auto-detection
-dss-system-test-thread.strip-file-extension = true
-dss-system-test-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+hdf5-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+hdf5-thread.data-set-info-extractor.strip-file-extension=true
+hdf5-thread.data-set-info-extractor.space-code=SPACE
+hdf5-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+hdf5-thread.type-extractor.file-format-type=HDF5
+hdf5-thread.type-extractor.locator-type=RELATIVE_LOCATION
+hdf5-thread.type-extractor.data-set-type=HCS_IMAGE
+dss-system-test-thread.incoming-dir=${root-dir}/incoming-simple
+dss-system-test-thread.incoming-data-completeness-condition=auto-detection
+dss-system-test-thread.strip-file-extension=true
+dss-system-test-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-dss-system-test-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+dss-system-test-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-dss-system-test-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-dss-system-test-thread.data-set-info-extractor.strip-file-extension = true
-dss-system-test-thread.data-set-info-extractor.space-code = CISD
-dss-system-test-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-dss-system-test-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-dss-system-test-thread.type-extractor.file-format-type = TIFF
-dss-system-test-thread.type-extractor.locator-type = RELATIVE_LOCATION
-dss-system-test-thread.type-extractor.data-set-type = HCS_IMAGE
-
+dss-system-test-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+dss-system-test-thread.data-set-info-extractor.strip-file-extension=true
+dss-system-test-thread.data-set-info-extractor.space-code=CISD
+dss-system-test-thread.data-set-info-extractor.data-set-properties-file-name=data-set.properties
+dss-system-test-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+dss-system-test-thread.type-extractor.file-format-type=TIFF
+dss-system-test-thread.type-extractor.locator-type=RELATIVE_LOCATION
+dss-system-test-thread.type-extractor.data-set-type=HCS_IMAGE
 # ---------------------------------------------------------------------------
 # (optional) image overview plugins configuration
 # ---------------------------------------------------------------------------
-
-# Comma separated names of image overview plugins. 
+# Comma separated names of image overview plugins.
 # Each plugin should have configuration properties prefixed with its name.
 # Generic properties for each <plugin> include: 
 #   <plugin>.class   - Fully qualified plugin class name (mandatory).
@@ -370,40 +301,33 @@ dss-system-test-thread.type-extractor.data-set-type = HCS_IMAGE
 #                      by the plugin (default = false). 
 #   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin 
 #                      (optional and ignored if default is true, otherwise mandatory). 
-overview-plugins = default-overview, hcs-image-overview
-
-default-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
-default-overview.default = true
+overview-plugins=default-overview, hcs-image-overview
+default-overview.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
+default-overview.default=true
 # Optional property specific to the plugin
-default-overview.label = default plugin
-
-hcs-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
-hcs-image-overview.dataset-types = HCS_IMAGE
-hcs-image-overview.label = plugin for HCS_IMAGE
-
+default-overview.label=default plugin
+hcs-image-overview.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
+hcs-image-overview.dataset-types=HCS_IMAGE
+hcs-image-overview.label=plugin for HCS_IMAGE
 # ---------------------------------------------------------------------------
 # (optional) archiver configuration
 # ---------------------------------------------------------------------------
-
 # Configuration of an archiver task. All properties are prefixed with 'archiver.'.
-
 # Archiver class specification (together with the list of packages this class belongs to).
 #archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
-archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver
+archiver.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver
 # destination of the archive (can be local or remote)
 # remote (host:path):
 #archiver.destination = openbis:tmp/dest
 # local:
-archiver.destination = /Users/openbis/dest
+archiver.destination=/Users/openbis/dest
 # indicates if data should be synchronized when the local copy differs from the one in the archive (default: true)
-archiver.synchronize-archive = true
-archiver.batch-size-in-bytes = 20000000
-
+archiver.synchronize-archive=true
+archiver.batch-size-in-bytes=20000000
 # ---------------------------------------------------------------------------
 # maintenance plugins configuration
 # ---------------------------------------------------------------------------
-
-# Comma separated names of maintenance plugins.  
+# Comma separated names of maintenance plugins.
 # Each plugin should have configuration properties prefixed with its name.
 # Mandatory properties for each <plugin> include: 
 #   <plugin>.class - Fully qualified plugin class name
@@ -412,15 +336,14 @@ archiver.batch-size-in-bytes = 20000000
 #   <plugin>.start - Time of the first execution (HH:mm)
 #   <plugin>.execute-only-once - If true, the task will be executed exactly once and the
 #                                interval will be ignored. By default set to false.
-maintenance-plugins = auto-archiver, path-info-deletion, post-registration
+maintenance-plugins=auto-archiver, path-info-deletion, post-registration
 #maintenance-plugins = auto-archiver, path-info-feeding
-
 # Performs automatic archiving of 'ACTIVE' data sets based on their properties
-auto-archiver.class = ch.systemsx.cisd.etlserver.plugins.AutoArchiverTask
+auto-archiver.class=ch.systemsx.cisd.etlserver.plugins.AutoArchiverTask
 # The time between subsequent archiving runs (in seconds)
-auto-archiver.interval = 10
+auto-archiver.interval=10
 #  Time of the first execution (HH:mm)
-auto-archiver.start = 23:00
+auto-archiver.start=23:00
 # the following properties are optional
 # only data sets of the specified type will be archived
 #auto-archiver.data-set-type = UNKNOWN
@@ -428,45 +351,37 @@ auto-archiver.start = 23:00
 #auto-archiver.older-than = 90
 # fully qualified class name of a policy that additionally filters the data sets to be archived
 #auto-archiver.policy.class = ch.systemsx.cisd.etlserver.plugins.DummyAutoArchiverPolicy
-
-# use this policy to archive datasets in batches grouped by experiment 
+# use this policy to archive datasets in batches grouped by experiment
 # auto-archiver.policy.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.ByExpermientPolicy
-
-# the min-size in bytes, default is 0 
+# the min-size in bytes, default is 0
 #auto-archiver.policy.minimal-archive-size = 
 # the max-size in bytes, default is 2^63-1. 
 #auto-archiver.policy.maximal-archive-size = 
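+# As an illustration only (hypothetical values, not defaults): batch archives of
+# roughly 1 MB up to 10 GB each
+#auto-archiver.policy.minimal-archive-size = 1048576
+#auto-archiver.policy.maximal-archive-size = 10737418240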
-
 # Maintenance task (performed only once) to create paths of existing data sets in pathinfo database
-path-info-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
-path-info-feeding.execute-only-once = true
-
+path-info-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
+path-info-feeding.execute-only-once=true
 # Maintenance task for deleting entries in pathinfo database after deletion of data sets
-path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
-path-info-deletion.interval = 120
-path-info-deletion.data-source = path-info-db
-path-info-deletion.data-set-perm-id = CODE
-
-# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database  
-post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
-post-registration.interval = 30
-post-registration.cleanup-tasks-folder = targets/cleanup-tasks
+path-info-deletion.class=ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
+path-info-deletion.interval=120
+path-info-deletion.data-source=path-info-db
+path-info-deletion.data-set-perm-id=CODE
+# Post-registration maintenance task that feeds all paths of a freshly registered data set into the pathinfo database
+post-registration.class=ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
+post-registration.interval=30
+post-registration.cleanup-tasks-folder=targets/cleanup-tasks
 # The following date should be the day when the DSS is started up for the first time with PathInfoDatabaseFeedingTask.
 # After PathInfoDatabaseFeedingTask has been performed it can be removed and the following line can be deleted.
 #post-registration.ignore-data-sets-before-date = 2011-04-18
-post-registration.last-seen-data-set-file = targets/last-seen-data-set
-post-registration.post-registration-tasks = pathinfo-feeding
-post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
-post-registration.pathinfo-feeding.compute-checksum = true
-post-registration.pathinfo-feeding.checksum-type = SHA1
-
+post-registration.last-seen-data-set-file=targets/last-seen-data-set
+post-registration.post-registration-tasks=pathinfo-feeding
+post-registration.pathinfo-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
+post-registration.pathinfo-feeding.compute-checksum=true
+post-registration.pathinfo-feeding.checksum-type=SHA1
 jython-version=2.7
-
 # Typical options to disable copying of general, owner and group permissions
 # rsync-options = --no-p --no-o --no-g
-
 #
 # Dropbox - ELN Plugin
 #
-default-incoming-share-id = 1
-default-incoming-share-minimum-free-space-in-gb = 10
\ No newline at end of file
+default-incoming-share-id=1
+default-incoming-share-minimum-free-space-in-gb=10
\ No newline at end of file
diff --git a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/ZipFileIntegrityVerifierTest.java b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/ZipFileIntegrityVerifierTest.java
index 132d9a1562462f8ab78cf641741dad85975bf5fe..94be4e529e6b96262247f820d202b7af393e9a31 100644
--- a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/ZipFileIntegrityVerifierTest.java
+++ b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/ZipFileIntegrityVerifierTest.java
@@ -66,7 +66,8 @@ public class ZipFileIntegrityVerifierTest
 
     private IArchiveFileVerifier verifier;
 
-    private static final File PWD = new File("../datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/");
+    private static final File PWD = new File(
+            "../server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/archiveverifier/verifier/");
 
     private static final File VALID_ZIP_FILE = new File(PWD, "VALID.zip");
 
diff --git a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetDeletionMaintenanceTaskTest.java b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetDeletionMaintenanceTaskTest.java
index 695ac1cc9749882535996a96a86a1dd792f736cc..5b2da26eb8d03e21453468521447883cfb18c011 100644
--- a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetDeletionMaintenanceTaskTest.java
+++ b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetDeletionMaintenanceTaskTest.java
@@ -113,10 +113,12 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
             ":0:0", "wrong_path/:0:0", "wrong_path/test.txt:8:70486887"
     );
 
-    private final String WRONG_PATH_ERROR = "Different paths: Path in the store is '20220111121934409-58/wrong_path' " +
-            "and in the archive '20220111121934409-58/original'.";
+    private final String WRONG_PATH_ERROR =
+            "Different paths: Path in the store is '20220111121934409-58/wrong_path' " +
+                    "and in the archive '20220111121934409-58/original'.";
 
-    private static final class MockMultiDataSetDeletionMaintenanceTask extends MultiDataSetDeletionMaintenanceTask
+    private static final class MockMultiDataSetDeletionMaintenanceTask
+            extends MultiDataSetDeletionMaintenanceTask
     {
         private final IMultiDataSetArchiverDBTransaction transaction;
 
@@ -138,7 +140,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
 
         private final IFreeSpaceProvider freeSpaceProvider;
 
-        public MockMultiDataSetDeletionMaintenanceTask(IMultiDataSetArchiverDBTransaction transaction,
+        public MockMultiDataSetDeletionMaintenanceTask(
+                IMultiDataSetArchiverDBTransaction transaction,
                 IMultiDataSetArchiverReadonlyQueryDAO readonlyDAO,
                 IEncapsulatedOpenBISService openBISService,
                 IDataStoreServiceInternal dataStoreService,
@@ -221,13 +224,15 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
             return MAX_DELETION_DATE;
         }
 
-        @Override protected IFreeSpaceProvider getFreeSpaceProvider()
+        @Override
+        protected IFreeSpaceProvider getFreeSpaceProvider()
         {
             return freeSpaceProvider;
         }
     }
 
-    private static final class MockMultiDataSetFileOperationsManager extends MultiDataSetFileOperationsManager
+    private static final class MockMultiDataSetFileOperationsManager
+            extends MultiDataSetFileOperationsManager
     {
         private static final long serialVersionUID = 1L;
 
@@ -284,7 +289,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         task = new MockMultiDataSetDeletionMaintenanceTask(
                 transaction, transaction, openBISService, dataStoreService,
                 contentProvider, shareIdManager, v3api, configProvider,
-                new MockMultiDataSetFileOperationsManager(properties, directoryProvider, freeSpaceProvider), freeSpaceProvider);
+                new MockMultiDataSetFileOperationsManager(properties, directoryProvider,
+                        freeSpaceProvider), freeSpaceProvider);
         logRecorder = LogRecordingUtils.createRecorder("%-5p %c - %m%n", Level.INFO, "OPERATION.*");
         task.setUp("", properties);
     }
@@ -342,12 +348,18 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         properties.setProperty(
                 MultiDataSetDeletionMaintenanceTask.LAST_SEEN_EVENT_ID_FILE,
                 lastSeenDataSetFile.getPath());
-        properties.setProperty("archiver." + MultiDataSetFileOperationsManager.FINAL_DESTINATION_KEY, archive.getAbsolutePath());
-        properties.setProperty(MultiDataSetFileOperationsManager.FINAL_DESTINATION_KEY, archive.getAbsolutePath());
+        properties.setProperty(
+                "archiver." + MultiDataSetFileOperationsManager.FINAL_DESTINATION_KEY,
+                archive.getAbsolutePath());
+        properties.setProperty(MultiDataSetFileOperationsManager.FINAL_DESTINATION_KEY,
+                archive.getAbsolutePath());
         if (withReplica)
         {
-            properties.setProperty("archiver." + MultiDataSetFileOperationsManager.REPLICATED_DESTINATION_KEY, replicate.getAbsolutePath());
-            properties.setProperty(MultiDataSetFileOperationsManager.REPLICATED_DESTINATION_KEY, replicate.getAbsolutePath());
+            properties.setProperty(
+                    "archiver." + MultiDataSetFileOperationsManager.REPLICATED_DESTINATION_KEY,
+                    replicate.getAbsolutePath());
+            properties.setProperty(MultiDataSetFileOperationsManager.REPLICATED_DESTINATION_KEY,
+                    replicate.getAbsolutePath());
         }
         if (withMappingFile)
         {
@@ -374,7 +386,7 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
     private File copyContainerToArchive(File parent, String folderName)
     {
         File dataSetFile = new File(
-                "../datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/resource/container.tar");
+                "../server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/resource/container.tar");
 
         File container = new File(parent, folderName);
         FileOperations.getInstance().copy(dataSetFile, container);
@@ -397,7 +409,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
     private Map<IDataSetId, DataSet> buildDataSetMap()
     {
         Map<IDataSetId, DataSet> dataSetMap = new HashMap<>();
-        dataSetMap.put(new DataSetPermId(ds4Code), generateDataSet(ds4Code, DATA_SET_STANDARD_SIZE));
+        dataSetMap.put(new DataSetPermId(ds4Code),
+                generateDataSet(ds4Code, DATA_SET_STANDARD_SIZE));
         return dataSetMap;
     }
 
@@ -474,10 +487,12 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         // GIVEN
         Properties properties = createProperties(false, true);
         IFreeSpaceProvider freeSpaceProvider = new SimpleFreeSpaceProvider();
-        MockMultiDataSetDeletionMaintenanceTask taskWithoutReplica = new MockMultiDataSetDeletionMaintenanceTask(
-                transaction, transaction, openBISService, dataStoreService,
-                contentProvider, shareIdManager, v3api, configProvider,
-                new MockMultiDataSetFileOperationsManager(properties, directoryProvider, freeSpaceProvider), freeSpaceProvider);
+        MockMultiDataSetDeletionMaintenanceTask taskWithoutReplica =
+                new MockMultiDataSetDeletionMaintenanceTask(
+                        transaction, transaction, openBISService, dataStoreService,
+                        contentProvider, shareIdManager, v3api, configProvider,
+                        new MockMultiDataSetFileOperationsManager(properties, directoryProvider,
+                                freeSpaceProvider), freeSpaceProvider);
         taskWithoutReplica.setUp("", properties);
 
         // Container1 contains only deleted dataSets
@@ -520,7 +535,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         context.checking(new Expectations()
         {
             {
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
 
@@ -533,7 +549,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 one(contentProvider).asContentWithoutModifyingAccessTimestamp(ds4Code);
                 will(returnValue(goodDs4Content));
 
-                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code), DataSetArchivingStatus.AVAILABLE, false);
+                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code),
+                        DataSetArchivingStatus.AVAILABLE, false);
                 one(v3api).updateDataSets(with(SESSION_TOKEN), with((recordedUpdates)));
             }
         });
@@ -556,7 +573,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         assertEquals(1, recordedUpdates.recordedObject().size());
         DataSetUpdate dataSetUpdate = recordedUpdates.recordedObject().get(0);
         assertTrue(dataSetUpdate.getPhysicalData().getValue().isArchivingRequested().isModified());
-        assertEquals(Boolean.TRUE, dataSetUpdate.getPhysicalData().getValue().isArchivingRequested().getValue());
+        assertEquals(Boolean.TRUE,
+                dataSetUpdate.getPhysicalData().getValue().isArchivingRequested().getValue());
         assertEquals(ds4Code, ((DataSetPermId) dataSetUpdate.getDataSetId()).getPermId());
 
         AssertionUtil.assertContainsLines(
@@ -612,7 +630,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(Arrays.asList(deleted3)));
                 inSequence(sequence);
 
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
                 inSequence(sequence);
@@ -635,7 +654,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(Arrays.asList(deleted3)));
                 inSequence(sequence);
 
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
                 inSequence(sequence);
@@ -653,7 +673,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(goodDs4Content));
                 inSequence(sequence);
 
-                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code), DataSetArchivingStatus.AVAILABLE, false);
+                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code),
+                        DataSetArchivingStatus.AVAILABLE, false);
                 inSequence(sequence);
                 one(v3api).updateDataSets(with(SESSION_TOKEN), with((recordedUpdates)));
                 inSequence(sequence);
@@ -669,7 +690,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
             task.execute();
         } catch (RuntimeException e)
         {
-            assertEquals(e.getMessage(), "Can't delete the container because something bad happened!");
+            assertEquals(e.getMessage(),
+                    "Can't delete the container because something bad happened!");
         }
 
         // Check that the archive and replica were NOT deleted.
@@ -721,7 +743,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(Arrays.asList(deleted3)));
                 inSequence(sequence);
 
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
                 inSequence(sequence);
@@ -744,7 +767,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(Arrays.asList(deleted3)));
                 inSequence(sequence);
 
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
                 inSequence(sequence);
@@ -762,7 +786,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(goodDs4Content));
                 inSequence(sequence);
 
-                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code), DataSetArchivingStatus.AVAILABLE, false);
+                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code),
+                        DataSetArchivingStatus.AVAILABLE, false);
                 inSequence(sequence);
                 one(v3api).updateDataSets(with(SESSION_TOKEN), with((recordedUpdates)));
                 inSequence(sequence);
@@ -845,7 +870,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         context.checking(new Expectations()
         {
             {
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
 
@@ -878,10 +904,12 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         IFreeSpaceProvider freeSpaceProvider = context.mock(IFreeSpaceProvider.class);
         Properties properties = createProperties(true, false);
 
-        MockMultiDataSetDeletionMaintenanceTask taskWithoutMappingFile = new MockMultiDataSetDeletionMaintenanceTask(
-                transaction, transaction, openBISService, dataStoreService,
-                contentProvider, shareIdManager, v3api, configProvider,
-                new MockMultiDataSetFileOperationsManager(properties, directoryProvider, freeSpaceProvider), freeSpaceProvider);
+        MockMultiDataSetDeletionMaintenanceTask taskWithoutMappingFile =
+                new MockMultiDataSetDeletionMaintenanceTask(
+                        transaction, transaction, openBISService, dataStoreService,
+                        contentProvider, shareIdManager, v3api, configProvider,
+                        new MockMultiDataSetFileOperationsManager(properties, directoryProvider,
+                                freeSpaceProvider), freeSpaceProvider);
         taskWithoutMappingFile.setUp("", properties);
 
         final HostAwareFile HostAwareShare = new HostAwareFile(share);
@@ -892,7 +920,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
         {
             {
                 // first taskWithoutMappingFile.execute() call
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
 
@@ -900,7 +929,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 will(returnValue(0L)); // not enough free space
 
                 // second taskWithoutMappingFile.execute() call
-                one(v3api).getDataSets(with(SESSION_TOKEN), with(Arrays.asList(new DataSetPermId(ds4Code))),
+                one(v3api).getDataSets(with(SESSION_TOKEN),
+                        with(Arrays.asList(new DataSetPermId(ds4Code))),
                         with(any(DataSetFetchOptions.class)));
                 will(returnValue(buildDataSetMap()));
 
@@ -916,7 +946,8 @@ public class MultiDataSetDeletionMaintenanceTaskTest extends AbstractFileSystemT
                 one(contentProvider).asContentWithoutModifyingAccessTimestamp(ds4Code);
                 will(returnValue(goodDs4Content));
 
-                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code), DataSetArchivingStatus.AVAILABLE, false);
+                one(openBISService).updateDataSetStatuses(Arrays.asList(ds4Code),
+                        DataSetArchivingStatus.AVAILABLE, false);
                 one(v3api).updateDataSets(with(SESSION_TOKEN), with((recordedUpdates)));
             }
         });
diff --git a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/FileInfoDssBuilderTest.java b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/FileInfoDssBuilderTest.java
index b62cf8ef005197606028ca75da4c15ebb379e078..1c2cf2e094a6dcd1bf09bba6d516d6cc39a59e2c 100644
--- a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/FileInfoDssBuilderTest.java
+++ b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/FileInfoDssBuilderTest.java
@@ -32,7 +32,7 @@ public class FileInfoDssBuilderTest extends AssertJUnit
     public void testFileInfoBuilderOnDir() throws IOException
     {
         String root =
-                "../datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1";
+                "../server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1";
         FileInfoDssBuilder builder = new FileInfoDssBuilder(root, root);
         ArrayList<FileInfoDssDTO> list = new ArrayList<FileInfoDssDTO>();
         File requestedFile = new File(root);
@@ -58,7 +58,7 @@ public class FileInfoDssBuilderTest extends AssertJUnit
     public void testFileInfoBuilderOnDirWithTrailingSlash() throws IOException
     {
         String root =
-                "../datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/";
+                "../server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/";
         FileInfoDssBuilder builder = new FileInfoDssBuilder(root, root);
         ArrayList<FileInfoDssDTO> list = new ArrayList<FileInfoDssDTO>();
         File requestedFile = new File(root);
diff --git a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/ValidationScriptRunnerTest.java b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/ValidationScriptRunnerTest.java
index abf1bff61039ca06b273d3dd7e5b126cc88e1f5c..cd094ca115224dff5e576d81085255cb3ec15b09 100644
--- a/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/ValidationScriptRunnerTest.java
+++ b/server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/ValidationScriptRunnerTest.java
@@ -30,7 +30,7 @@ public class ValidationScriptRunnerTest extends AssertJUnit
 {
 
     private static final String SCRIPTS_FOLDER =
-            "../datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/";
+            "../server-original-data-store/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/shared/api/v1/validation/";
 
     private static final String TEST_DATA_FOLDER = SCRIPTS_FOLDER;
 
@@ -52,7 +52,7 @@ public class ValidationScriptRunnerTest extends AssertJUnit
     {
         ValidationScriptRunner scriptRunner =
                 ValidationScriptRunner.createValidatorFromScriptPaths(new String[]
-                { BASIC_VALIDATION_SCRIPT });
+                        { BASIC_VALIDATION_SCRIPT });
         List<ValidationError> errors = scriptRunner.validate(new File(VALID_DATA_SET));
 
         assertTrue("The valid data set should have no errors", errors.isEmpty());
@@ -63,7 +63,7 @@ public class ValidationScriptRunnerTest extends AssertJUnit
     {
         ValidationScriptRunner scriptRunner =
                 ValidationScriptRunner.createValidatorFromScriptPaths(new String[]
-                { SPLITTED_VALIDATION_SCRIPT_1, SPLITTED_VALIDATION_SCRIPT_2 });
+                        { SPLITTED_VALIDATION_SCRIPT_1, SPLITTED_VALIDATION_SCRIPT_2 });
         List<ValidationError> errors = scriptRunner.validate(new File(VALID_DATA_SET));
 
         assertTrue("The valid data set should have no errors", errors.isEmpty());
@@ -74,7 +74,7 @@ public class ValidationScriptRunnerTest extends AssertJUnit
     {
         ValidationScriptRunner scriptRunner =
                 ValidationScriptRunner.createValidatorFromScriptPaths(new String[]
-                { BASIC_VALIDATION_SCRIPT });
+                        { BASIC_VALIDATION_SCRIPT });
         List<ValidationError> errors = scriptRunner.validate(new File(INVALID_DATA_SET));
 
         assertEquals(1, errors.size());
@@ -88,7 +88,7 @@ public class ValidationScriptRunnerTest extends AssertJUnit
     {
         ValidationScriptRunner scriptRunner =
                 ValidationScriptRunner.createValidatorFromScriptPaths(new String[]
-                { SPLITTED_VALIDATION_SCRIPT_1, SPLITTED_VALIDATION_SCRIPT_2 });
+                        { SPLITTED_VALIDATION_SCRIPT_1, SPLITTED_VALIDATION_SCRIPT_2 });
         List<ValidationError> errors = scriptRunner.validate(new File(INVALID_DATA_SET));
 
         assertEquals(1, errors.size());
diff --git a/test-api-openbis-javascript/servers/common/datastore_server/db/pathinfo_test_js_common.sql b/test-api-openbis-javascript/servers/common/datastore_server/db/pathinfo_test_js_common.sql
index 43a08bd4763ec25ba6c428ae91ed44e83a31998c..6f66d7e336605a7d8a07132441f35c1a7b577f97 100644
--- a/test-api-openbis-javascript/servers/common/datastore_server/db/pathinfo_test_js_common.sql
+++ b/test-api-openbis-javascript/servers/common/datastore_server/db/pathinfo_test_js_common.sql
@@ -13211,9 +13211,9 @@ SELECT pg_catalog.setval('data_sets_id_seq', 12, true);
 --
 
 COPY database_version_logs (db_version, module_name, run_status, run_status_timestamp, module_code, run_exception) FROM stdin;
-006	datastore_server/sql/postgresql/006/schema-006.sql	SUCCESS	2013-04-12 10:06:04.38	\\x0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a2f2a20446f6d61696e73202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202a2f0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a0a43524541544520444f4d41494e20544543485f494420415320424947494e543b0a0a43524541544520444f4d41494e20434f44452041532056415243484152283430293b0a0a43524541544520444f4d41494e2046494c455f5041544820415320564152434841522831303030293b0a0a43524541544520444f4d41494e20424f4f4c45414e5f4348415220415320424f4f4c45414e2044454641554c542046414c53453b0a0a43524541544520444f4d41494e2054494d455f5354414d502041532054494d455354414d5020574954482054494d45205a4f4e453b0a0a0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a2f2a205461626c657320202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202a2f0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a0a435245415445205441424c4520444154415f5345545320280a202049442042494753455249414c204e4f54204e554c4c2c0a2020434f444520434f4445204e4f54204e554c4c2c0a20204c4f434154494f4e2046494c455f50415448204e4f54204e554c4c2c0a0a20205052494d415259204b455920284944292c0a2020554e495155452028434f4445290a293b0a0a43524541544520494e44455820444154415f534554535f434f44455f494458204f4e20444154415f534554532028434f4445293b0a0a435245415445205441424c4520444154415f5345545f46494c455320280a202049442042494753455249414c204e4f54204e554c4c2c0a2020444153455f494420544543485f4944204e4f54204e554c4c2c0a2020504152454e545f494420544543485f49442c0a202052454c41544956455f504154482046494c455f50415448204e4f54204e554c4c2c0a202046494c455f4e414d452046494c455f50415448204e4f54204e554c4c2c0a202053495a455f494e5f425954455320424947494e54204e4f54204e554c4c2c0a2020434845434b53554d5f435243333220494e54454745522c0a202049535f4449524543544f525920424f4f4c45414e5f43484152204e4f54204e554c4c2c0a20204c4153545f4d4f4449464945442054494d455f5354414d50204e4f54204e554c4c2044454641554c54204e4f5728292c0a0a20205052494d415259204b455920284944292c0a2020434f4e53545241494e5420464b5f444154415f5345545f46494c45535f444154415f5345545320464f524549474e204b45592028444153455f494429205245464552454e43455320444154415f534554532028494429204f4e2044454c4554452043415343414445204f4e2055504441544520434153434144452c0a2020434f4e53545241494e5420464b5f444154415f5345545f46494c45535f444154415f5345545f46494c455320464f524549474e204b45592028504152454e545f494429205245464552454e43455320444154415f5345545f46494c45532028494429204f4e2044454c4554452043415343414445204f4e2055504441544520434153434144450a293b0a0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f494458204f4e20444154415f5345545f46494c45532028444153455f4944293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f504152454e545f49445f494458204f4e20444154415f5345545f46494c45532028444153455f49442c20504152454e545f4944293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f52454c41544956455f504154485f494458204f4e20444154415f5345545f46494c45532028444153455f494
42c2052454c41544956455f50415448293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f46494c455f4e414d455f494458204f4e20444154415f5345545f46494c45532028444153455f49442c2046494c455f4e414d45293b0a0a435245415445205441424c45204556454e545320280a20204c4153545f5345454e5f44454c4554494f4e5f4556454e545f494420544543485f4944204e4f54204e554c4c0a293b0a0a435245415445205441424c45204c4153545f46454544494e475f4556454e5420280a2020524547495354524154494f4e5f54494d455354414d502054494d455f5354414d50204e4f54204e554c4c0a293b0a0a	\N
-007	../../../../datastore_server/source/sql/postgresql/migration/migration-006-007.sql	SUCCESS	2016-06-13 19:47:17.156	\\x2d2d206368616e676520434f444520646f6d61696e20746f2056415243484152283630292c20756e666f7274756e6174656c79206120747970652063616e6e6f7420626520616c746572656420666f7220616e206578697374696e6720646f6d61696e0a0a414c544552205441424c4520444154415f5345545320414c54455220434f4c554d4e20434f444520545950452056415243484152283630293b0a0a44524f5020444f4d41494e20434f44453b0a43524541544520444f4d41494e20434f44452041532056415243484152283630293b0a0a414c544552205441424c4520444154415f5345545320414c54455220434f4c554d4e20434f4445205459504520434f44453b0a	\N
-008	../../../../datastore_server/source/sql/postgresql/migration/migration-007-008.sql	SUCCESS	2016-06-13 19:47:17.179	\\x616c746572207461626c6520646174615f7365745f66696c65732064726f7020636f6e73747261696e742069662065786973747320666b5f646174615f7365745f66696c65735f646174615f7365745f66696c65733b0a	\N
+006	server-original-data-store/sql/postgresql/006/schema-006.sql	SUCCESS	2013-04-12 10:06:04.38	\\x0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a2f2a20446f6d61696e73202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202a2f0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a0a43524541544520444f4d41494e20544543485f494420415320424947494e543b0a0a43524541544520444f4d41494e20434f44452041532056415243484152283430293b0a0a43524541544520444f4d41494e2046494c455f5041544820415320564152434841522831303030293b0a0a43524541544520444f4d41494e20424f4f4c45414e5f4348415220415320424f4f4c45414e2044454641554c542046414c53453b0a0a43524541544520444f4d41494e2054494d455f5354414d502041532054494d455354414d5020574954482054494d45205a4f4e453b0a0a0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a2f2a205461626c657320202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202a2f0a2f2a202d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d2d202a2f0a0a435245415445205441424c4520444154415f5345545320280a202049442042494753455249414c204e4f54204e554c4c2c0a2020434f444520434f4445204e4f54204e554c4c2c0a20204c4f434154494f4e2046494c455f50415448204e4f54204e554c4c2c0a0a20205052494d415259204b455920284944292c0a2020554e495155452028434f4445290a293b0a0a43524541544520494e44455820444154415f534554535f434f44455f494458204f4e20444154415f534554532028434f4445293b0a0a435245415445205441424c4520444154415f5345545f46494c455320280a202049442042494753455249414c204e4f54204e554c4c2c0a2020444153455f494420544543485f4944204e4f54204e554c4c2c0a2020504152454e545f494420544543485f49442c0a202052454c41544956455f504154482046494c455f50415448204e4f54204e554c4c2c0a202046494c455f4e414d452046494c455f50415448204e4f54204e554c4c2c0a202053495a455f494e5f425954455320424947494e54204e4f54204e554c4c2c0a2020434845434b53554d5f435243333220494e54454745522c0a202049535f4449524543544f525920424f4f4c45414e5f43484152204e4f54204e554c4c2c0a20204c4153545f4d4f4449464945442054494d455f5354414d50204e4f54204e554c4c2044454641554c54204e4f5728292c0a0a20205052494d415259204b455920284944292c0a2020434f4e53545241494e5420464b5f444154415f5345545f46494c45535f444154415f5345545320464f524549474e204b45592028444153455f494429205245464552454e43455320444154415f534554532028494429204f4e2044454c4554452043415343414445204f4e2055504441544520434153434144452c0a2020434f4e53545241494e5420464b5f444154415f5345545f46494c45535f444154415f5345545f46494c455320464f524549474e204b45592028504152454e545f494429205245464552454e43455320444154415f5345545f46494c45532028494429204f4e2044454c4554452043415343414445204f4e2055504441544520434153434144450a293b0a0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f494458204f4e20444154415f5345545f46494c45532028444153455f4944293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f504152454e545f49445f494458204f4e20444154415f5345545f46494c45532028444153455f49442c20504152454e545f4944293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f52454c41544956455f504154485f494458204f4e20444154415f5345545f46494c45532028444
153455f49442c2052454c41544956455f50415448293b0a43524541544520494e44455820444154415f5345545f46494c45535f444153455f49445f46494c455f4e414d455f494458204f4e20444154415f5345545f46494c45532028444153455f49442c2046494c455f4e414d45293b0a0a435245415445205441424c45204556454e545320280a20204c4153545f5345454e5f44454c4554494f4e5f4556454e545f494420544543485f4944204e4f54204e554c4c0a293b0a0a435245415445205441424c45204c4153545f46454544494e475f4556454e5420280a2020524547495354524154494f4e5f54494d455354414d502054494d455f5354414d50204e4f54204e554c4c0a293b0a0a	\N
+007	../../../../server-original-data-store/source/sql/postgresql/migration/migration-006-007.sql	SUCCESS	2016-06-13 19:47:17.156	\\x2d2d206368616e676520434f444520646f6d61696e20746f2056415243484152283630292c20756e666f7274756e6174656c79206120747970652063616e6e6f7420626520616c746572656420666f7220616e206578697374696e6720646f6d61696e0a0a414c544552205441424c4520444154415f5345545320414c54455220434f4c554d4e20434f444520545950452056415243484152283630293b0a0a44524f5020444f4d41494e20434f44453b0a43524541544520444f4d41494e20434f44452041532056415243484152283630293b0a0a414c544552205441424c4520444154415f5345545320414c54455220434f4c554d4e20434f4445205459504520434f44453b0a	\N
+008	../../../../server-original-data-store/source/sql/postgresql/migration/migration-007-008.sql	SUCCESS	2016-06-13 19:47:17.179	\\x616c746572207461626c6520646174615f7365745f66696c65732064726f7020636f6e73747261696e742069662065786973747320666b5f646174615f7365745f66696c65735f646174615f7365745f66696c65733b0a	\N
 \.
 
 
diff --git a/test-miscellaneous/screening/config/dss-service.properties b/test-miscellaneous/screening/config/dss-service.properties
index 13224b0937764ab1ac4691ef3363142d287a671f..c572055fe1c830f2a29827745626833c646b27f9 100644
--- a/test-miscellaneous/screening/config/dss-service.properties
+++ b/test-miscellaneous/screening/config/dss-service.properties
@@ -77,7 +77,7 @@ path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.Pa
 path-info-db.databaseEngineCode=postgresql
 path-info-db.basicDatabaseName=pathinfo
 path-info-db.databaseKind=platonic
-path-info-db.scriptFolder=../datastore_server/source/sql
+path-info-db.scriptFolder=../server-original-data-store/source/sql
 # ---------------------------------------------------------------------------
 # reporting and processing plugins configuration
 # ---------------------------------------------------------------------------
diff --git a/test-ui-core/dss-root/etc/service.properties b/test-ui-core/dss-root/etc/service.properties
index ac8f59206d510a2fa4cebf6ad5819a7d0c94ec0c..c26532947fec2cdc18c71ed0ed94cd645a9612dd 100644
--- a/test-ui-core/dss-root/etc/service.properties
+++ b/test-ui-core/dss-root/etc/service.properties
@@ -1,133 +1,100 @@
 # Unique code of this Data Store Server. Not more than 40 characters.
-data-store-server-code = external
-
-root-dir = targets
-
+data-store-server-code=external
+root-dir=targets
 # The root directory of the data store
-storeroot-dir = ${root-dir}/store
-
+storeroot-dir=${root-dir}/store
 # The directory for incoming files over rpc
-rpc-incoming-dir = ${root-dir}/incoming-rpc
-
-core-plugins-folder = source/core-plugins
-
-# The directory where the command queue file is located; defaults to storeroot-dir 
-commandqueue-dir =
-
+rpc-incoming-dir=${root-dir}/incoming-rpc
+core-plugins-folder=source/core-plugins
+# The directory where the command queue file is located; defaults to storeroot-dir
+commandqueue-dir=
 # Port
-port = 10002
-
+port=10002
 # Session timeout in minutes
-session-timeout = 30
-
+session-timeout=30
 # Set to 'false' for development/testing without a deployed server. In this mode the datastore will
 # not use SSL when connecting to openBIS. Otherwise all 'keystore' properties need to be set for an
 # SSL connection (the default when the use-ssl property is not set, so there is no need to specify it on production servers).
-use-ssl = false
-
+use-ssl=false
 # Path, password and key password for SSL connection
 #keystore.path = dist/etc/openBIS.keystore
 #keystore.password = changeit
 #keystore.key-password = changeit
-
 # The check interval (in seconds)
-check-interval = 5
-
+check-interval=5
 # The time-out for clean up work in the shutdown sequence (in seconds).
 # Note that the maximal time for the shutdown sequence to complete can be as large
 # as twice this time.
-shutdown-timeout = 2
-
+shutdown-timeout=2
 # The minimum time (in seconds) for which the data stream stays available after the user has
 # requested the data stream URL. If not specified, the default value (20 seconds) will be used.
 # data-stream-timeout = 20
-
 # If free disk space goes below value defined here, a notification email will be sent.
 # Value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
 # specified or if value is negative, the system will not be watching.
-highwater-mark = 1048576
-
-# If a data set is successfully registered it sends out an email to the registrator. 
+highwater-mark=1048576
+# If a data set is successfully registered it sends out an email to the registrator.
 # If this property is not specified, no email is sent to the registrator. This property
 # does not affect the mails which are sent, when the data set could not be registered.
-notify-successful-registration = false
-
+notify-successful-registration=false
 # The URL of the openBIS server
-server-url = http://localhost:10000
-
+server-url=http://localhost:10000
 # Time out for accessing openBIS server. Default value is 5 minutes.
-server-timeout-in-minutes = 10
-
+server-timeout-in-minutes=10
 # The location of the jars accessible to Web Start clients
-webstart-jar-path = targets/dist/datastore_server/lib
-
+webstart-jar-path=targets/dist/datastore_server/lib
 # The username to use when contacting the openBIS server
-username = etlserver
-
+username=etlserver
 # The password to use when contacting the openBIS server
-password = doesnotmatter
-
+password=doesnotmatter
 # The base URL for Web client access.
-download-url = http://localhost:10002
-
+download-url=http://localhost:10002
 # SMTP properties (must start with 'mail' to be considered).
-mail.smtp.host = file://${root-dir}/email
-mail.from = datastore_server@localhost
-mail.smtp.user = 
+mail.smtp.host=file://${root-dir}/email
+mail.from=datastore_server@localhost
+mail.smtp.user=
 mail.smtp.password = 
 # If this property is set a test e-mail will be sent to the specified address after DSS successfully started-up.
-mail.test.address = test@localhost
-
+mail.test.address=test@localhost
 # ---------------------------------------------------------------------------
 # Data sources
-data-sources = path-info-db 
-
+data-sources=path-info-db 
 # Data source for pathinfo database
-path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
-path-info-db.databaseEngineCode = postgresql
-path-info-db.basicDatabaseName = pathinfo
-path-info-db.databaseKind = ui_test
-path-info-db.scriptFolder = ../datastore_server/source/sql
-
-
+path-info-db.version-holder-class=ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
+path-info-db.databaseEngineCode=postgresql
+path-info-db.basicDatabaseName=pathinfo
+path-info-db.databaseKind=ui_test
+path-info-db.scriptFolder=../server-original-data-store/source/sql
 # ---------------------------------------------------------------------------
-
 # Maximum number of retries if renaming failed.
 # renaming.failure.max-retries = 12
-
 # The number of milliseconds to wait before retrying to execute the renaming process.
 # renaming.failure.millis-to-sleep = 5000
-
-# Globally used separator character which separates entities in a data set file name 
-data-set-file-name-entity-separator = _
-
-# The period of no write access that needs to pass before an incoming data item is considered 
+# Globally used separator character which separates entities in a data set file name
+data-set-file-name-entity-separator=_
+# The period of no write access that needs to pass before an incoming data item is considered
 # complete and ready to be processed (in seconds) [default: 300]. 
 # Valid only when the auto-detection method is used to determine whether incoming data is ready to be processed.
-quiet-period = 3
-
+quiet-period=3
 # ---------------------------------------------------------------------------
 # reporting and processing plugins configuration
 # ---------------------------------------------------------------------------
-
 # Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
 # If the name has a 'default-' prefix, it will be used by default in the data set Data View.
-reporting-plugins = demo-reporter, tsv-viewer, csv-viewer, hcs-viewer, hcs-viewer-tiff, default-hcs-viewer
-
+reporting-plugins=demo-reporter, tsv-viewer, csv-viewer, hcs-viewer, hcs-viewer-tiff, default-hcs-viewer
 # Label of the plugin which will be shown for the users.
-demo-reporter.label = Show Dataset Size
+demo-reporter.label=Show Dataset Size
 # Comma separated list of dataset type codes which can be handled by this plugin.
 # Wildcards are allowed, but the DSS server may need to be restarted if a new data set type is added to openBIS
-demo-reporter.dataset-types = .*
+demo-reporter.dataset-types=.*
 # Plugin class specification (together with the list of packages this class belongs to).
-demo-reporter.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
+demo-reporter.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
 # The property file. Its content will be passed as a parameter to the plugin.
-demo-reporter.properties-file = 
-
+demo-reporter.properties-file=
 tsv-viewer.label = TSV View 
-tsv-viewer.dataset-types = TSV
-tsv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
-tsv-viewer.properties-file =
+tsv-viewer.dataset-types=TSV
+tsv-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
+tsv-viewer.properties-file=
 # Optional properties:
 # - separator of values in the file, significant for TSV and CSV files; default: tab
 #tsv-viewer.separator = ;
@@ -135,125 +102,103 @@ tsv-viewer.properties-file =
 #tsv-viewer.ignore-comments = false
 # - excel sheet name or index (0 based) used for the excel file (.xsl or .xslx); default: 0 (first sheet)
 #tsv-viewer.excel-sheet = example_sheet_name
-
-csv-viewer.label = CSV View 
-csv-viewer.dataset-types = CSV
-csv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
-csv-viewer.separator = ,
-
-hcs-viewer.label = HCS PNG 	
-hcs-viewer.dataset-types = HCS_IMAGE
-hcs-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-hcs-viewer.download-url = ${download-url}
-hcs-viewer.data-set-regex = .*/PNG/.*\.jpg
-hcs-viewer.data-set-path = original
-
-hcs-viewer-tiff.label = HCS TIFF 	
-hcs-viewer-tiff.dataset-types = HCS_IMAGE
-hcs-viewer-tiff.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-hcs-viewer-tiff.download-url = ${download-url}
-hcs-viewer-tiff.data-set-regex = .*/TIFF/.*\.jpg
-hcs-viewer-tiff.data-set-path = original
-
-default-hcs-viewer.label = Default HCS View 	
-default-hcs-viewer.dataset-types = HCS_IMAGE
-default-hcs-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
-default-hcs-viewer.download-url = ${download-url}
-default-hcs-viewer.data-set-regex = .*\.jpg
-
-
+csv-viewer.label=CSV View 
+csv-viewer.dataset-types=CSV
+csv-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
+csv-viewer.separator=,
+hcs-viewer.label=HCS PNG 	
+hcs-viewer.dataset-types=HCS_IMAGE
+hcs-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+hcs-viewer.download-url=${download-url}
+hcs-viewer.data-set-regex=.*/PNG/.*\.jpg
+hcs-viewer.data-set-path=original
+hcs-viewer-tiff.label=HCS TIFF 	
+hcs-viewer-tiff.dataset-types=HCS_IMAGE
+hcs-viewer-tiff.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+hcs-viewer-tiff.download-url=${download-url}
+hcs-viewer-tiff.data-set-regex=.*/TIFF/.*\.jpg
+hcs-viewer-tiff.data-set-path=original
+default-hcs-viewer.label=Default HCS View 	
+default-hcs-viewer.dataset-types=HCS_IMAGE
+default-hcs-viewer.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.GenericDssLinkReportingPlugin
+default-hcs-viewer.download-url=${download-url}
+default-hcs-viewer.data-set-regex=.*\.jpg
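+# For illustration: with the regexes above, a hypothetical path such as
+# 'original/plate1/PNG/well_A01.jpg' would be matched by hcs-viewer, while
+# default-hcs-viewer matches any path ending in '.jpg'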
 # Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
-processing-plugins = demo-processor
-
-# The configuration of the processing plugin is the same as the reporting plugins configuration. 
-demo-processor.label = Demo Processing
+processing-plugins=demo-processor
+# The configuration of the processing plugin is the same as the reporting plugins configuration.
+demo-processor.label=Demo Processing
 # Wildcards are allowed, but the DSS server may need to be restarted if a new data set type is added to openBIS
-demo-processor.dataset-types = HCS_.*, CONTAINER_.*
-demo-processor.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoProcessingPlugin
-demo-processor.properties-file = 
-
+demo-processor.dataset-types=HCS_.*, CONTAINER_.*
+demo-processor.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoProcessingPlugin
+demo-processor.properties-file=
 # Data set validators used to accept or reject data sets to be registered.
 # Comma separated list of validator definitions.
-data-set-validators = validator
-
+data-set-validators=validator
 # Definition of data set validator 'validator'
-validator.data-set-type = HCS_IMAGE
-validator.path-patterns = **/*.txt
-validator.columns = id, description, size
-validator.id.header-pattern = ID
-validator.id.mandatory = true
-validator.id.order = 1
-validator.id.value-type = unique
-validator.description.header-pattern = Description
-validator.description.value-type = string
-validator.description.value-pattern = .{0,100}
-validator.size.header-pattern = A[0-9]+
-validator.size.can-define-multiple-columns = true
-validator.size.allow-empty-values = true
-validator.size.value-type = numeric
-validator.site.value-range = [0,Infinity)
-
+validator.data-set-type=HCS_IMAGE
+validator.path-patterns=**/*.txt
+validator.columns=id, description, size
+validator.id.header-pattern=ID
+validator.id.mandatory=true
+validator.id.order=1
+validator.id.value-type=unique
+validator.description.header-pattern=Description
+validator.description.value-type=string
+validator.description.value-pattern=.{0,100}
+validator.size.header-pattern=A[0-9]+
+validator.size.can-define-multiple-columns=true
+validator.size.allow-empty-values=true
+validator.size.value-type=numeric
+validator.size.value-range=[0,Infinity)
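+# For illustration, a file accepted by the validator above might look like this
+# (hypothetical content; columns shown space-aligned here, the actual file would be tab-separated):
+#   ID    Description      A1    A2
+#   s1    first sample     0.5
+#   s2    second sample    2.0   7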
 # ---------------------------------------------------------------------------
-
 # Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
 # E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
 inputs=jython-thread, main-thread, tsv-thread, csv-thread, simple-thread, hdf5-thread, dss-system-test-thread
-
-# True if incoming directories should be created on server startup if they don't exist. 
+# True if incoming directories should be created on server startup if they don't exist.
 # Default - false (the server will fail at startup if one of the incoming directories doesn't exist).
-incoming-dir-create = true
-
+incoming-dir-create=true
 # --------------------------------------------------------------------------------------------------
 # Jython thread configuration
 # --------------------------------------------------------------------------------------------------
 # The directory to watch for incoming data.
-jython-thread.incoming-dir = ${root-dir}/incoming-jython
-jython-thread.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.JythonTopLevelDataSetHandler
-jython-thread.incoming-data-completeness-condition = auto-detection
-jython-thread.strip-file-extension = true
-jython-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
-jython-thread.script-path = etc/jython-data-set-handler.py
-
-
+jython-thread.incoming-dir=${root-dir}/incoming-jython
+jython-thread.top-level-data-set-handler=ch.systemsx.cisd.etlserver.registrator.JythonTopLevelDataSetHandler
+jython-thread.incoming-data-completeness-condition=auto-detection
+jython-thread.strip-file-extension=true
+jython-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+jython-thread.script-path=etc/jython-data-set-handler.py
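+# A minimal sketch of what etc/jython-data-set-handler.py could contain, assuming the
+# classic dropbox API in which 'service' and 'incoming' are bound for the script
+# (illustrative only -- check the handler API of your openBIS version):
+#   transaction = service.transaction(incoming)        # open a registration transaction
+#   dataSet = transaction.createNewDataSet()           # create the data set to register
+#   dataSet.setDataSetType('HCS_IMAGE')
+#   transaction.moveFile(incoming.getAbsolutePath(), dataSet)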
 # ---------------------------------------------------------------------------
 # Dataset uploader 'main-thread' configuration
 # ---------------------------------------------------------------------------
-
 # The directory to watch for incoming data.
-main-thread.incoming-dir = ${root-dir}/incoming
-
-# Specifies what should happen if an error occurs during dataset processing. 
+main-thread.incoming-dir=${root-dir}/incoming
+# Specifies what should happen if an error occurs during dataset processing.
 # By default this flag is set to false and the user has to modify the 'faulty paths file'
 # each time a faulty dataset should be processed again.
 # Set this flag to true if the processing should be repeated after some time without manual intervention.
 # Note that this can increase the server load.
 # main-thread.reprocess-faulty-datasets = false
-
 # Determines when the incoming data should be considered complete and ready to be processed.
 # Allowed values: 
 #  - auto-detection - complete when no write access has been detected for a specified 'quiet-period'
 #  - marker-file    - complete when an appropriate marker file for the data exists.
 # The default value is 'marker-file'.
-main-thread.incoming-data-completeness-condition = auto-detection
-
+main-thread.incoming-data-completeness-condition=auto-detection
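+# With 'marker-file', the usual convention is an empty file named
+# '.MARKER_is_finished_<name>' placed next to the incoming item '<name>'
+# (an assumption -- verify against your openBIS version).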
 # The store format that should be applied in the incoming directory.
-main-thread.incoming-dir.format = 
-
+main-thread.incoming-dir.format=
 # The extractor plugin class to use for code extraction
-main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+main-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-main-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
+main-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
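+# For illustration (an assumption): with separator '.', an incoming item named
+# 'MY-GROUP.PLATE1' is split into the tokens 'MY-GROUP' and 'PLATE1', from which the
+# extractor derives the entity codes; which token maps to which entity is configurable.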
 #main-thread.data-set-info-extractor.space-code = CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-main-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-main-thread.type-extractor.file-format-type = TIFF
-main-thread.type-extractor.locator-type = RELATIVE_LOCATION
-main-thread.type-extractor.data-set-type = HCS_IMAGE
-
+main-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+main-thread.type-extractor.file-format-type=TIFF
+main-thread.type-extractor.locator-type=RELATIVE_LOCATION
+main-thread.type-extractor.data-set-type=HCS_IMAGE
 # The storage processor (IStorageProcessor implementation)
-main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+main-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 #main-thread.storage-processor.version = 1.1
 #main-thread.storage-processor.sampleTypeCode = CELL_PLATE
 #main-thread.storage-processor.sampleTypeDescription = Screening Plate
@@ -262,102 +207,90 @@ main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcess
 #main-thread.storage-processor.contains_original_data = TRUE
 #main-thread.storage-processor.well_geometry = 3x3
 #main-thread.storage-processor.file-extractor = ch.systemsx.cisd.etlserver.imsb.HCSImageFileExtractor
-
-# Path to the script that will be executed after successful data set registration. 
+# Path to the script that will be executed after successful data set registration.
 # The script will be called with two parameters: <data-set-code> and <absolute-data-set-path> (in the data store).
 # main-thread.post-registration-script = /example/scripts/my-script.sh
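+# A sketch of such a script (hypothetical, not shipped with openBIS):
+#   #!/bin/sh
+#   # $1 = data set code, $2 = absolute path of the data set in the store
+#   echo "registered data set $1 at $2" >> registrations.log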
-
 # The directory to watch for incoming data.
-tsv-thread.incoming-dir = ${root-dir}/incoming-tsv
-tsv-thread.incoming-data-completeness-condition = auto-detection
-tsv-thread.strip-file-extension = true
-tsv-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+tsv-thread.incoming-dir=${root-dir}/incoming-tsv
+tsv-thread.incoming-data-completeness-condition=auto-detection
+tsv-thread.strip-file-extension=true
+tsv-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-tsv-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+tsv-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-tsv-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-tsv-thread.data-set-info-extractor.strip-file-extension = true
+tsv-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+tsv-thread.data-set-info-extractor.strip-file-extension=true
 #main-thread.data-set-info-extractor.space-code = CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-tsv-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-tsv-thread.type-extractor.file-format-type = TSV
-tsv-thread.type-extractor.locator-type = RELATIVE_LOCATION
-tsv-thread.type-extractor.data-set-type = TSV
-
-csv-thread.incoming-dir = ${root-dir}/incoming-csv
-csv-thread.incoming-data-completeness-condition = auto-detection
-csv-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+tsv-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+tsv-thread.type-extractor.file-format-type=TSV
+tsv-thread.type-extractor.locator-type=RELATIVE_LOCATION
+tsv-thread.type-extractor.data-set-type=TSV
+csv-thread.incoming-dir=${root-dir}/incoming-csv
+csv-thread.incoming-data-completeness-condition=auto-detection
+csv-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-csv-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+csv-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-csv-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-csv-thread.data-set-info-extractor.strip-file-extension = true
-csv-thread.data-set-info-extractor.space-code = CISD
+csv-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+csv-thread.data-set-info-extractor.strip-file-extension=true
+csv-thread.data-set-info-extractor.space-code=CISD
 # Location of file containing data set properties 
 #main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-csv-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-csv-thread.type-extractor.file-format-type = CSV
-csv-thread.type-extractor.locator-type = RELATIVE_LOCATION
-csv-thread.type-extractor.data-set-type = CSV
-
+csv-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+csv-thread.type-extractor.file-format-type=CSV
+csv-thread.type-extractor.locator-type=RELATIVE_LOCATION
+csv-thread.type-extractor.data-set-type=CSV
 # The directory to watch for incoming data.
-simple-thread.incoming-dir = ${root-dir}/incoming-simple
-simple-thread.incoming-data-completeness-condition = auto-detection
-simple-thread.strip-file-extension = true
-simple-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+simple-thread.incoming-dir=${root-dir}/incoming-simple
+simple-thread.incoming-data-completeness-condition=auto-detection
+simple-thread.strip-file-extension=true
+simple-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-simple-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+simple-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-simple-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-simple-thread.data-set-info-extractor.strip-file-extension = true
-simple-thread.data-set-info-extractor.space-code = CISD
-
-simple-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-simple-thread.type-extractor.file-format-type = TIFF
-simple-thread.type-extractor.locator-type = RELATIVE_LOCATION
-simple-thread.type-extractor.data-set-type = HCS_IMAGE
-
+simple-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+simple-thread.data-set-info-extractor.strip-file-extension=true
+simple-thread.data-set-info-extractor.space-code=CISD
+simple-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+simple-thread.type-extractor.file-format-type=TIFF
+simple-thread.type-extractor.locator-type=RELATIVE_LOCATION
+simple-thread.type-extractor.data-set-type=HCS_IMAGE
 # The directory to watch for incoming data.
-hdf5-thread.incoming-dir = ${root-dir}/incoming-hdf5
-hdf5-thread.incoming-data-completeness-condition = auto-detection
-hdf5-thread.strip-file-extension = true
-hdf5-thread.storage-processor = ch.systemsx.cisd.etlserver.hdf5.HDF5StorageProcessor
+hdf5-thread.incoming-dir=${root-dir}/incoming-hdf5
+hdf5-thread.incoming-data-completeness-condition=auto-detection
+hdf5-thread.strip-file-extension=true
+hdf5-thread.storage-processor=ch.systemsx.cisd.etlserver.hdf5.HDF5StorageProcessor
 # The extractor plugin class to use for code extraction
-hdf5-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+hdf5-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-hdf5-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-hdf5-thread.data-set-info-extractor.strip-file-extension = true
-hdf5-thread.data-set-info-extractor.space-code = SPACE
-
-hdf5-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-hdf5-thread.type-extractor.file-format-type = HDF5
-hdf5-thread.type-extractor.locator-type = RELATIVE_LOCATION
-hdf5-thread.type-extractor.data-set-type = HCS_IMAGE
-
-dss-system-test-thread.incoming-dir = ${root-dir}/incoming-simple
-dss-system-test-thread.incoming-data-completeness-condition = auto-detection
-dss-system-test-thread.strip-file-extension = true
-dss-system-test-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
+hdf5-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+hdf5-thread.data-set-info-extractor.strip-file-extension=true
+hdf5-thread.data-set-info-extractor.space-code=SPACE
+hdf5-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+hdf5-thread.type-extractor.file-format-type=HDF5
+hdf5-thread.type-extractor.locator-type=RELATIVE_LOCATION
+hdf5-thread.type-extractor.data-set-type=HCS_IMAGE
+dss-system-test-thread.incoming-dir=${root-dir}/incoming-simple
+dss-system-test-thread.incoming-data-completeness-condition=auto-detection
+dss-system-test-thread.strip-file-extension=true
+dss-system-test-thread.storage-processor=ch.systemsx.cisd.etlserver.DefaultStorageProcessor
 # The extractor plugin class to use for code extraction
-dss-system-test-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
+dss-system-test-thread.data-set-info-extractor=ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
 # Separator used to extract the barcode in the data set file name
-dss-system-test-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
-dss-system-test-thread.data-set-info-extractor.strip-file-extension = true
-dss-system-test-thread.data-set-info-extractor.space-code = CISD
-dss-system-test-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties
-
-dss-system-test-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
-dss-system-test-thread.type-extractor.file-format-type = TIFF
-dss-system-test-thread.type-extractor.locator-type = RELATIVE_LOCATION
-dss-system-test-thread.type-extractor.data-set-type = HCS_IMAGE
-
+dss-system-test-thread.data-set-info-extractor.entity-separator=${data-set-file-name-entity-separator}
+dss-system-test-thread.data-set-info-extractor.strip-file-extension=true
+dss-system-test-thread.data-set-info-extractor.space-code=CISD
+dss-system-test-thread.data-set-info-extractor.data-set-properties-file-name=data-set.properties
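+# For illustration (an assumption -- verify the expected format): this file is a
+# two-column, tab-separated table of property codes and values, e.g.
+#   property    value
+#   COMMENT     registered via dropbox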
+dss-system-test-thread.type-extractor=ch.systemsx.cisd.etlserver.SimpleTypeExtractor
+dss-system-test-thread.type-extractor.file-format-type=TIFF
+dss-system-test-thread.type-extractor.locator-type=RELATIVE_LOCATION
+dss-system-test-thread.type-extractor.data-set-type=HCS_IMAGE
 # ---------------------------------------------------------------------------
 # (optional) image overview plugins configuration
 # ---------------------------------------------------------------------------
-
-# Comma separated names of image overview plugins. 
+# Comma separated names of image overview plugins.
 # Each plugin should have configuration properties prefixed with its name.
 # Generic properties for each <plugin> include: 
 #   <plugin>.class   - Fully qualified plugin class name (mandatory).
@@ -365,39 +298,32 @@ dss-system-test-thread.type-extractor.data-set-type = HCS_IMAGE
 #                      by the plugin (default = false). 
 #   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin 
 #                      (optional and ignored if default is true, otherwise mandatory). 
-overview-plugins = default-overview, hcs-image-overview
-
-default-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
-default-overview.default = true
+overview-plugins=default-overview, hcs-image-overview
+default-overview.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
+default-overview.default=true
 # Optional property specific to the plugin
-default-overview.label = default plugin
-
-hcs-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
-hcs-image-overview.dataset-types = HCS_IMAGE
-hcs-image-overview.label = plugin for HCS_IMAGE
-
+default-overview.label=default plugin
+hcs-image-overview.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
+hcs-image-overview.dataset-types=HCS_IMAGE
+hcs-image-overview.label=plugin for HCS_IMAGE
 # ---------------------------------------------------------------------------
 # (optional) archiver configuration
 # ---------------------------------------------------------------------------
-
 # Configuration of an archiver task. All properties are prefixed with 'archiver.'.
-
 # Archiver class specification (together with the list of packages this class belongs to).
 #archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
-archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver
+archiver.class=ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver
 # destination of the archive (can be local or remote; 'host:path' denotes a remote rsync destination)
 # remote:
 #archiver.destination = openbis:tmp/dest
 # local:
-archiver.destination = /Users/openbis/dest
+archiver.destination=/Users/openbis/dest
 # indicates if data should be synchronized when the local copy differs from the one in the archive (default: true)
-archiver.synchronize-archive = true
-
+archiver.synchronize-archive=true
 # ---------------------------------------------------------------------------
 # maintenance plugins configuration
 # ---------------------------------------------------------------------------
-
-# Comma separated names of maintenance plugins.  
+# Comma separated names of maintenance plugins.
 # Each plugin should have configuration properties prefixed with its name.
 # Mandatory properties for each <plugin> include: 
 #   <plugin>.class - Fully qualified plugin class name
@@ -406,15 +332,14 @@ archiver.synchronize-archive = true
 #   <plugin>.start - Time of the first execution (HH:mm)
 #   <plugin>.execute-only-once - If true the task will be executed exactly once, 
 #                                interval will be ignored. By default set to false.
-maintenance-plugins = auto-archiver, path-info-deletion, post-registration
+maintenance-plugins=auto-archiver, path-info-deletion, post-registration
 #maintenance-plugins = auto-archiver, path-info-feeding
-
 # Performs automatic archiving of 'ACTIVE' data sets based on their properties
-auto-archiver.class = ch.systemsx.cisd.etlserver.plugins.AutoArchiverTask
+auto-archiver.class=ch.systemsx.cisd.etlserver.plugins.AutoArchiverTask
 # The time between subsequent archiving runs (in seconds)
-auto-archiver.interval = 10
+auto-archiver.interval=10
 #  Time of the first execution (HH:mm)
-auto-archiver.start = 23:00
+auto-archiver.start=23:00
 # following properties are optional
 # only data sets of the specified type will be archived
 #auto-archiver.data-set-type = UNKNOWN
@@ -422,30 +347,25 @@ auto-archiver.start = 23:00
 #auto-archiver.older-than = 90
 # fully qualified class name of a policy that additionally filters the data sets to be archived
 #auto-archiver.policy.class = ch.systemsx.cisd.etlserver.plugins.DummyAutoArchiverPolicy
-
 # Maintenance task (performed only once) to create paths of existing data sets in pathinfo database
-path-info-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
-path-info-feeding.execute-only-once = true
-
+path-info-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
+path-info-feeding.execute-only-once=true
 # Maintenance task for deleting entries in pathinfo database after deletion of data sets
-path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
-path-info-deletion.interval = 120
-path-info-deletion.data-source = path-info-db
-path-info-deletion.data-set-perm-id = CODE
-
-# Maintenance task for post registration of all paths of a freshly registered data set to be fed into pathinfo database  
-post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
-post-registration.interval = 30
-post-registration.cleanup-tasks-folder = targets/cleanup-tasks
+path-info-deletion.class=ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
+path-info-deletion.interval=120
+path-info-deletion.data-source=path-info-db
+path-info-deletion.data-set-perm-id=CODE
+# Maintenance task that, after registration, feeds all paths of a freshly registered data set into the pathinfo database
+post-registration.class=ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
+post-registration.interval=30
+post-registration.cleanup-tasks-folder=targets/cleanup-tasks
 # The following date should be the day when the DSS is started up for the first time with PathInfoDatabaseFeedingTask.
 # After PathInfoDatabaseFeedingTask has run, the task can be removed and the following line deleted.
 #post-registration.ignore-data-sets-before-date = 2011-04-18
-post-registration.last-seen-data-set-file = targets/last-seen-data-set
-post-registration.post-registration-tasks = pathinfo-feeding
-post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
+post-registration.last-seen-data-set-file=targets/last-seen-data-set
+post-registration.post-registration-tasks=pathinfo-feeding
+post-registration.pathinfo-feeding.class=ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
 # post-registration.pathinfo-feeding.compute-checksum = true
-
 # DSS RPC API Configuration
-dss-rpc.put-default = jython-thread   
-
+dss-rpc.put-default=jython-thread
 jython-version=2.7