diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..1572beba93e220d5f818b3dad65831a12322de86
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,203 @@
+/*
+ *                                 Apache License
+ *                           Version 2.0, January 2004
+ *                        http://www.apache.org/licenses/
+ *
+ *   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+ *
+ *   1. Definitions.
+ *
+ *      "License" shall mean the terms and conditions for use, reproduction,
+ *      and distribution as defined by Sections 1 through 9 of this document.
+ *
+ *      "Licensor" shall mean the copyright owner or entity authorized by
+ *      the copyright owner that is granting the License.
+ *
+ *      "Legal Entity" shall mean the union of the acting entity and all
+ *      other entities that control, are controlled by, or are under common
+ *      control with that entity. For the purposes of this definition,
+ *      "control" means (i) the power, direct or indirect, to cause the
+ *      direction or management of such entity, whether by contract or
+ *      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ *      outstanding shares, or (iii) beneficial ownership of such entity.
+ *
+ *      "You" (or "Your") shall mean an individual or Legal Entity
+ *      exercising permissions granted by this License.
+ *
+ *      "Source" form shall mean the preferred form for making modifications,
+ *      including but not limited to software source code, documentation
+ *      source, and configuration files.
+ *
+ *      "Object" form shall mean any form resulting from mechanical
+ *      transformation or translation of a Source form, including but
+ *      not limited to compiled object code, generated documentation,
+ *      and conversions to other media types.
+ *
+ *      "Work" shall mean the work of authorship, whether in Source or
+ *      Object form, made available under the License, as indicated by a
+ *      copyright notice that is included in or attached to the work
+ *      (an example is provided in the Appendix below).
+ *
+ *      "Derivative Works" shall mean any work, whether in Source or Object
+ *      form, that is based on (or derived from) the Work and for which the
+ *      editorial revisions, annotations, elaborations, or other modifications
+ *      represent, as a whole, an original work of authorship. For the purposes
+ *      of this License, Derivative Works shall not include works that remain
+ *      separable from, or merely link (or bind by name) to the interfaces of,
+ *      the Work and Derivative Works thereof.
+ *
+ *      "Contribution" shall mean any work of authorship, including
+ *      the original version of the Work and any modifications or additions
+ *      to that Work or Derivative Works thereof, that is intentionally
+ *      submitted to Licensor for inclusion in the Work by the copyright owner
+ *      or by an individual or Legal Entity authorized to submit on behalf of
+ *      the copyright owner. For the purposes of this definition, "submitted"
+ *      means any form of electronic, verbal, or written communication sent
+ *      to the Licensor or its representatives, including but not limited to
+ *      communication on electronic mailing lists, source code control systems,
+ *      and issue tracking systems that are managed by, or on behalf of, the
+ *      Licensor for the purpose of discussing and improving the Work, but
+ *      excluding communication that is conspicuously marked or otherwise
+ *      designated in writing by the copyright owner as "Not a Contribution."
+ *
+ *      "Contributor" shall mean Licensor and any individual or Legal Entity
+ *      on behalf of whom a Contribution has been received by Licensor and
+ *      subsequently incorporated within the Work.
+ *
+ *   2. Grant of Copyright License. Subject to the terms and conditions of
+ *      this License, each Contributor hereby grants to You a perpetual,
+ *      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ *      copyright license to reproduce, prepare Derivative Works of,
+ *      publicly display, publicly perform, sublicense, and distribute the
+ *      Work and such Derivative Works in Source or Object form.
+ *
+ *   3. Grant of Patent License. Subject to the terms and conditions of
+ *      this License, each Contributor hereby grants to You a perpetual,
+ *      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ *      (except as stated in this section) patent license to make, have made,
+ *      use, offer to sell, sell, import, and otherwise transfer the Work,
+ *      where such license applies only to those patent claims licensable
+ *      by such Contributor that are necessarily infringed by their
+ *      Contribution(s) alone or by combination of their Contribution(s)
+ *      with the Work to which such Contribution(s) was submitted. If You
+ *      institute patent litigation against any entity (including a
+ *      cross-claim or counterclaim in a lawsuit) alleging that the Work
+ *      or a Contribution incorporated within the Work constitutes direct
+ *      or contributory patent infringement, then any patent licenses
+ *      granted to You under this License for that Work shall terminate
+ *      as of the date such litigation is filed.
+ *
+ *   4. Redistribution. You may reproduce and distribute copies of the
+ *      Work or Derivative Works thereof in any medium, with or without
+ *      modifications, and in Source or Object form, provided that You
+ *      meet the following conditions:
+ *
+ *      (a) You must give any other recipients of the Work or
+ *          Derivative Works a copy of this License; and
+ *
+ *      (b) You must cause any modified files to carry prominent notices
+ *          stating that You changed the files; and
+ *
+ *      (c) You must retain, in the Source form of any Derivative Works
+ *          that You distribute, all copyright, patent, trademark, and
+ *          attribution notices from the Source form of the Work,
+ *          excluding those notices that do not pertain to any part of
+ *          the Derivative Works; and
+ *
+ *      (d) If the Work includes a "NOTICE" text file as part of its
+ *          distribution, then any Derivative Works that You distribute must
+ *          include a readable copy of the attribution notices contained
+ *          within such NOTICE file, excluding those notices that do not
+ *          pertain to any part of the Derivative Works, in at least one
+ *          of the following places: within a NOTICE text file distributed
+ *          as part of the Derivative Works; within the Source form or
+ *          documentation, if provided along with the Derivative Works; or,
+ *          within a display generated by the Derivative Works, if and
+ *          wherever such third-party notices normally appear. The contents
+ *          of the NOTICE file are for informational purposes only and
+ *          do not modify the License. You may add Your own attribution
+ *          notices within Derivative Works that You distribute, alongside
+ *          or as an addendum to the NOTICE text from the Work, provided
+ *          that such additional attribution notices cannot be construed
+ *          as modifying the License.
+ *
+ *      You may add Your own copyright statement to Your modifications and
+ *      may provide additional or different license terms and conditions
+ *      for use, reproduction, or distribution of Your modifications, or
+ *      for any such Derivative Works as a whole, provided Your use,
+ *      reproduction, and distribution of the Work otherwise complies with
+ *      the conditions stated in this License.
+ *
+ *   5. Submission of Contributions. Unless You explicitly state otherwise,
+ *      any Contribution intentionally submitted for inclusion in the Work
+ *      by You to the Licensor shall be under the terms and conditions of
+ *      this License, without any additional terms or conditions.
+ *      Notwithstanding the above, nothing herein shall supersede or modify
+ *      the terms of any separate license agreement you may have executed
+ *      with Licensor regarding such Contributions.
+ *
+ *   6. Trademarks. This License does not grant permission to use the trade
+ *      names, trademarks, service marks, or product names of the Licensor,
+ *      except as required for reasonable and customary use in describing the
+ *      origin of the Work and reproducing the content of the NOTICE file.
+ *
+ *   7. Disclaimer of Warranty. Unless required by applicable law or
+ *      agreed to in writing, Licensor provides the Work (and each
+ *      Contributor provides its Contributions) on an "AS IS" BASIS,
+ *      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ *      implied, including, without limitation, any warranties or conditions
+ *      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ *      PARTICULAR PURPOSE. You are solely responsible for determining the
+ *      appropriateness of using or redistributing the Work and assume any
+ *      risks associated with Your exercise of permissions under this License.
+ *
+ *   8. Limitation of Liability. In no event and under no legal theory,
+ *      whether in tort (including negligence), contract, or otherwise,
+ *      unless required by applicable law (such as deliberate and grossly
+ *      negligent acts) or agreed to in writing, shall any Contributor be
+ *      liable to You for damages, including any direct, indirect, special,
+ *      incidental, or consequential damages of any character arising as a
+ *      result of this License or out of the use or inability to use the
+ *      Work (including but not limited to damages for loss of goodwill,
+ *      work stoppage, computer failure or malfunction, or any and all
+ *      other commercial damages or losses), even if such Contributor
+ *      has been advised of the possibility of such damages.
+ *
+ *   9. Accepting Warranty or Additional Liability. While redistributing
+ *      the Work or Derivative Works thereof, You may choose to offer,
+ *      and charge a fee for, acceptance of support, warranty, indemnity,
+ *      or other liability obligations and/or rights consistent with this
+ *      License. However, in accepting such obligations, You may act only
+ *      on Your own behalf and on Your sole responsibility, not on behalf
+ *      of any other Contributor, and only if You agree to indemnify,
+ *      defend, and hold each Contributor harmless for any liability
+ *      incurred by, or claims asserted against, such Contributor by reason
+ *      of your accepting any such warranty or additional liability.
+ *
+ *   END OF TERMS AND CONDITIONS
+ *
+ *   APPENDIX: How to apply the Apache License to your work.
+ *
+ *      To apply the Apache License to your work, attach the following
+ *      boilerplate notice, with the fields enclosed by brackets "[]"
+ *      replaced with your own identifying information. (Don't include
+ *      the brackets!)  The text should be enclosed in the appropriate
+ *      comment syntax for the file format. We also recommend that a
+ *      file or class name and description of purpose be included on the
+ *      same "printed page" as the copyright notice for easier
+ *      identification within third-party archives.
+ *
+ *   Copyright [yyyy] [name of copyright owner]
+ *
+ *   Licensed under the Apache License, Version 2.0 (the "License");
+ *   you may not use this file except in compliance with the License.
+ *   You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an "AS IS" BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing permissions and
+ *   limitations under the License.
+ */
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
index 8531f3c267f3bdc1c305f32b0515d046c7c51e78..d9f5b3e210f735d4bce832b4f493ce10e4029afa 100644
--- a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
@@ -27,13 +27,11 @@ package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -42,7 +40,9 @@ import javax.activation.DataHandler;
 import javax.activation.DataSource;
 import javax.mail.util.ByteArrayDataSource;
 
+import org.apache.log4j.DailyRollingFileAppender;
 import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
 
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.SyncEntityKind;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.ConfigReader;
@@ -78,7 +78,7 @@ public class HarvesterMaintenanceTask<T extends DataSetInformation> implements I
 
     private static final String LAST_INCREMENTAL_SYNC_TIMESTAMP = "last-incremental-sync-timestamp";
 
-    protected static final Logger operationLog =
+    private static final Logger operationLog =
             LogFactory.getLogger(LogCategory.OPERATION, HarvesterMaintenanceTask.class);
 
     private static final String HARVESTER_CONFIG_FILE_PROPERTY_NAME = "harvester-config-file";
@@ -97,7 +97,7 @@ public class HarvesterMaintenanceTask<T extends DataSetInformation> implements I
 
     private String dataStoreCode;
 
-    class Timestamps
+    private static class Timestamps
     {
         final Date lastIncSyncTimestamp;
 
@@ -143,96 +143,115 @@ public class HarvesterMaintenanceTask<T extends DataSetInformation> implements I
     {
         operationLog.info(this.getClass() + " started.");
 
-        SynchronizationConfigReader syncConfigReader = new SynchronizationConfigReader();
         List<SyncConfig> configs;
         try
         {
-            configs = syncConfigReader.readConfiguration(harvesterConfigFile, operationLog);
-        } catch (Exception e)
-        {
-            operationLog.error("", e);
-            return;
-        }
-
-        for (SyncConfig config : configs)
-        {
-            try
+            configs = SynchronizationConfigReader.readConfiguration(harvesterConfigFile);
+            for (SyncConfig config : configs)
             {
-                operationLog.info("-------------------------------------------------------------------------------------------");
-                operationLog.info("-------------------------------------------------------------------------------------------");
-                operationLog
-                        .info((config.isDryRun() ? "Dry " : "") + "Running synchronization from data source: " + config.getDataSourceOpenbisURL()
-                                + " for user " + config.getUser());
-
-                operationLog.info("verbose =  " + config.isVerbose());
-                String fileName = config.getLastSyncTimestampFileName();
-                File lastSyncTimestampFile = new File(fileName);
-                Timestamps timestamps = loadCutOffTimeStamps(lastSyncTimestampFile);
-
-                Date cutOffTimestamp = timestamps.lastIncSyncTimestamp;
-                boolean isFullSync = lastSyncTimestampFile.exists() == false || isTimeForFullSync(config, timestamps.lastFullSyncTimestamp);
-                operationLog.info("Last incremental sync timestamp: " + timestamps.lastIncSyncTimestamp);
-                operationLog.info("Last full sync timestamp: " + timestamps.lastFullSyncTimestamp);
-                if (isFullSync == true)
+                Logger logger = createLogger(config);
+                try
                 {
-                    cutOffTimestamp = new Date(0L);
-                    if (lastSyncTimestampFile.exists() == false)
+                    logger.info("====================== " + (config.isDryRun() ? "Dry " : "")
+                            + "Running synchronization from data source: " + config.getDataSourceOpenbisURL()
+                            + " for user " + config.getUser());
+                    
+                    logger.info("verbose =  " + config.isVerbose());
+                    String fileName = config.getLastSyncTimestampFileName();
+                    File lastSyncTimestampFile = new File(fileName);
+                    Timestamps timestamps = loadCutOffTimeStamps(lastSyncTimestampFile);
+                    
+                    Date cutOffTimestamp = timestamps.lastIncSyncTimestamp;
+                    boolean isFullSync = lastSyncTimestampFile.exists() == false || isTimeForFullSync(config, timestamps.lastFullSyncTimestamp);
+                    logger.info("Last incremental sync timestamp: " + timestamps.lastIncSyncTimestamp);
+                    logger.info("Last full sync timestamp: " + timestamps.lastFullSyncTimestamp);
+                    if (isFullSync == true)
                     {
-                        operationLog.info("Performing a full initial sync");
+                        cutOffTimestamp = new Date(0L);
+                        if (lastSyncTimestampFile.exists() == false)
+                        {
+                            logger.info("Performing a full initial sync");
+                        }
+                        else
+                        {
+                            logger.info("Performing a full sync as a minimum of " + config.getFullSyncInterval()
+                            + " day(s) have elapsed since last full sync.");
+                        }
                     }
                     else
                     {
-                        operationLog.info("Performing a full sync as a minimum of " + config.getFullSyncInterval()
-                                + " day(s) have elapsed since last full sync.");
+                        logger.info("Performing an incremental sync");
                     }
-                }
-                else
-                {
-                    operationLog.info("Performing an incremental sync");
-                }
-                String notSyncedEntitiesFileName = config.getNotSyncedEntitiesFileName();
-                Set<String> notSyncedDataSetCodes = getNotSyncedDataSetCodes(notSyncedEntitiesFileName);
-                Set<String> notSyncedAttachmentHolderCodes = getNotSyncedAttachmentHolderCodes(notSyncedEntitiesFileName);
-                Set<String> blackListedDataSetCodes = getBlackListedDataSetCodes(notSyncedEntitiesFileName);
-
-                Date newCutOffTimestamp = new Date();
-
-                EntitySynchronizer synchronizer =
-                        new EntitySynchronizer(service, dataStoreCode, storeRoot, cutOffTimestamp, timestamps.lastIncSyncTimestamp,
-                                notSyncedDataSetCodes,
-                                blackListedDataSetCodes,
-                                notSyncedAttachmentHolderCodes,
-                                context, config,
-                                operationLog);
-                Date resourceListTimestamp = synchronizer.syncronizeEntities();
-                if (resourceListTimestamp.before(newCutOffTimestamp))
-                {
-                    newCutOffTimestamp = resourceListTimestamp;
-                }
-                Date newLastIncSyncTimestamp = newCutOffTimestamp;
-                Date newLastFullSyncTimestamp = timestamps.lastFullSyncTimestamp;
-                if (isFullSync == true)
-                {
-                    newLastFullSyncTimestamp = newCutOffTimestamp;
-                }
-
-                if (config.isDryRun() == false)
-                {
-                    operationLog.info("Saving the timestamp of sync start to file");
-                    saveSyncTimestamp(lastSyncTimestampFile, newLastIncSyncTimestamp, newLastFullSyncTimestamp);
-                }
-                else
+                    String notSyncedEntitiesFileName = config.getNotSyncedEntitiesFileName();
+                    Set<String> notSyncedDataSetCodes = getNotSyncedDataSetCodes(notSyncedEntitiesFileName);
+                    Set<String> notSyncedAttachmentHolderCodes = getNotSyncedAttachmentHolderCodes(notSyncedEntitiesFileName);
+                    Set<String> blackListedDataSetCodes = getBlackListedDataSetCodes(notSyncedEntitiesFileName);
+                    
+                    Date newCutOffTimestamp = new Date();
+                    
+                    EntitySynchronizer synchronizer =
+                            new EntitySynchronizer(service, dataStoreCode, storeRoot, cutOffTimestamp, timestamps.lastIncSyncTimestamp,
+                                    notSyncedDataSetCodes,
+                                    blackListedDataSetCodes,
+                                    notSyncedAttachmentHolderCodes,
+                                    context, config, logger);
+                    Date resourceListTimestamp = synchronizer.synchronizeEntities();
+                    if (resourceListTimestamp.before(newCutOffTimestamp))
+                    {
+                        newCutOffTimestamp = resourceListTimestamp;
+                    }
+                    Date newLastIncSyncTimestamp = newCutOffTimestamp;
+                    Date newLastFullSyncTimestamp = timestamps.lastFullSyncTimestamp;
+                    if (isFullSync == true)
+                    {
+                        newLastFullSyncTimestamp = newCutOffTimestamp;
+                    }
+                    
+                    if (config.isDryRun() == false)
+                    {
+                        logger.info("Saving the timestamp of sync start to file");
+                        saveSyncTimestamp(lastSyncTimestampFile, newLastIncSyncTimestamp, newLastFullSyncTimestamp);
+                    }
+                    else
+                    {
+                        logger.info("Dry run finished");
+                    }
+                    
+                } catch (Exception e)
                 {
-                    operationLog.info("Dry run finished");
+                    logger.error("Sync failed: ", e);
+                    sendErrorEmail(config, "Synchronization failed");
                 }
-
-                operationLog.info(this.getClass() + " finished executing.");
-            } catch (Exception e)
-            {
-                operationLog.error("Sync failed: ", e);
-                sendErrorEmail(config, "Synchronization failed");
             }
+        } catch (Exception e)
+        {
+            operationLog.error("", e);
+            return;
+        }
+        operationLog.info(this.getClass() + " finished executing.");
+    }
+
+    private Logger createLogger(SyncConfig config)
+    {
+        Logger logger = Logger.getLogger(LogFactory.getLoggerName(LogCategory.OPERATION, EntitySynchronizer.class)
+                + "." + config.getDataSourceAlias());
+        String name = "bdfile";
+        if (logger.getAppender(name) == null)
+        {
+            // configure the appender
+            DailyRollingFileAppender console = new DailyRollingFileAppender(); // file appender rolled daily (despite the variable name)
+            console.setName(name);
+            String PATTERN = "%d %-5p [%t] %c - %m%n";
+            console.setLayout(new PatternLayout(PATTERN));
+            // console.setThreshold(Level.FATAL);
+            console.setAppend(true);// set to false to overwrite log at every start
+            console.setFile(config.getLogFilePath());
+            console.activateOptions();
+            // attach the appender to this per-data-source logger (NOT the root logger)
+            logger.addAppender(console);
+            logger.setAdditivity(false);
         }
+        return logger;
     }
 
     private Timestamps loadCutOffTimeStamps(File lastSyncTimestampFile) throws IOException, ParseException
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
index 2ca04f240ff1e658e0b7c158cc3c250b55fada8e..d00fbc51701533acc4cded48f96d8d86ef878087 100644
--- a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
@@ -22,10 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Pattern;
 
-import org.apache.log4j.DailyRollingFileAppender;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-
 import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
 import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Space;
@@ -107,7 +103,7 @@ public class SynchronizationConfigReader
     private static final String LOG_FILE_PROPERTY_NAME = "log-file";
 
 
-    public List<SyncConfig> readConfiguration(File harvesterConfigFile, Logger logger) throws IOException
+    public static List<SyncConfig> readConfiguration(File harvesterConfigFile) throws IOException
     {
         List<SyncConfig> configs = new ArrayList<>();
         ConfigReader reader = new ConfigReader(harvesterConfigFile);
@@ -120,10 +116,6 @@ public class SynchronizationConfigReader
             config.setDataSourceAlias(reader.getString(section, DATA_SOURCE_ALIAS_PROPERTY_NAME, section, false));
             String defaultLogFilePath = DEFAULT_LOG_FILE_PATH.replaceFirst(Pattern.quote("{alias}"), config.getDataSourceAlias());
             config.setLogFilePath(reader.getString(section, LOG_FILE_PROPERTY_NAME, defaultLogFilePath, false));
-            if (config.getLogFilePath() != null)
-            {
-                configureFileAppender(config, logger);
-            }
             config.setDataSourceURI(reader.getString(section, DATA_SOURCE_URL_PROPERTY_NAME, null, true));
             config.setDataSourceOpenbisURL(reader.getString(section, DATA_SOURCE_OPENBIS_URL_PROPERTY_NAME, null, true));
             config.setDataSourceDSSURL(reader.getString(section, DATA_SOURCE_DSS_URL_PROPERTY_NAME, null, true));
@@ -190,23 +182,7 @@ public class SynchronizationConfigReader
         return configs;
     }
 
-    private void configureFileAppender(SyncConfig config, Logger logger)
-    {
-        DailyRollingFileAppender console = new DailyRollingFileAppender(); // create appender
-        // configure the appender
-        console.setName("bdfile");
-        String PATTERN = "%d %-5p [%t] %c - %m%n";
-        console.setLayout(new PatternLayout(PATTERN));
-        // console.setThreshold(Level.FATAL);
-        console.setAppend(true);// set to false to overwrite log at every start
-        console.setFile(config.getLogFilePath());
-        console.activateOptions();
-        // add appender to any Logger (here is root)
-        logger.addAppender(console);
-        logger.setAdditivity(false);
-    }
-
-    private void createDataSourceToHarvesterSpaceMappings(SyncConfig config)
+    private static void createDataSourceToHarvesterSpaceMappings(SyncConfig config)
     {
         List<String> dataSourceSpaceList = config.getDataSourceSpaces();
         List<String> harvesterSpaceList = config.getHarvesterSpaces();
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
index 77b7fd670f0fe89d628476a015918af4f52d7668..d28c9c4ddf042a7f9ea6add1bcbafda78d308e6b 100644
--- a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
@@ -38,7 +38,9 @@ import org.apache.commons.collections4.map.MultiKeyMap;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.DailyRollingFileAppender;
 import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
 import org.w3c.dom.Document;
 
 import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi;
@@ -64,6 +66,7 @@ import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.SyncEntityKind
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.entitygraph.EntityGraph;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.entitygraph.IEntityRetriever;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.entitygraph.INode;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.HarvesterMaintenanceTask;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.ParallelizedExecutionPreferences;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.SyncConfig;
 import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.Connection;
@@ -88,6 +91,8 @@ import ch.systemsx.cisd.cifex.shared.basic.UserFailureException;
 import ch.systemsx.cisd.common.concurrent.ParallelizedExecutor;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
 import ch.systemsx.cisd.common.logging.Log4jSimpleLogger;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
 import ch.systemsx.cisd.etlserver.registrator.api.v1.impl.ConversionUtils;
 import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetDirectoryProvider;
 import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetProcessingContext;
@@ -184,13 +189,14 @@ public class EntitySynchronizer
         this.operationLog = operationLog;
     }
 
-    public Date syncronizeEntities() throws Exception
+    public Date synchronizeEntities() throws Exception
     {
-        DataSourceConnector dataSourceConnector = new DataSourceConnector(config.getDataSourceURI(), config.getAuthenticationCredentials(), operationLog);
-        return syncronizeEntities(dataSourceConnector);
+        DataSourceConnector dataSourceConnector =
+                new DataSourceConnector(config.getDataSourceURI(), config.getAuthenticationCredentials(), operationLog);
+        return synchronizeEntities(dataSourceConnector);
     }
 
-    private Date syncronizeEntities(IDataSourceConnector dataSourceConnector) throws Exception
+    private Date synchronizeEntities(IDataSourceConnector dataSourceConnector) throws Exception
     {
         // retrieve the document from the data source
         operationLog.info("\n");
@@ -262,13 +268,11 @@ public class EntitySynchronizer
             AttachmentSynchronizationSummary syncSummary = processAttachments(attachmentHoldersToProcess);
             notSyncedAttachmentsHolders = syncSummary.notRegisteredAttachmentHolderCodes;
             operationLog.info("Attachment synchronization summary:\n" + syncSummary.addedCount + " attachment(s) were added.\n"
-                    + syncSummary.updatedCount
-                    + " attachment(s) were updated.\n"
-                    + syncSummary.deletedCount
-                    + " attachment(s) were deleted.\n"
-                    + "attachments for "
-                    + notSyncedAttachmentsHolders.size()
-                    + " entitities FAILED synchronization. ");
+                    + syncSummary.updatedCount + " attachment(s) were updated.\n"
+                    + syncSummary.deletedCount + " attachment(s) were deleted.\n"
+                    + (notSyncedAttachmentsHolders.isEmpty() ? ""
+                            : "synchronization of attachments for "
+                                    + notSyncedAttachmentsHolders.size() + " entities FAILED."));
         }
 
         // register physical data sets without any hierarchy
@@ -293,11 +297,12 @@ public class EntitySynchronizer
             saveFailedEntitiesFile(dsRegistrationSummary.notRegisteredDataSetCodes, notSyncedAttachmentsHolders);
 
             notRegisteredDataSetCodes = dsRegistrationSummary.notRegisteredDataSetCodes;
-            operationLog.info("Data set synchronization summary:\n" + dsRegistrationSummary.addedDsCount + " data set(s) were added.\n"
-                    + dsRegistrationSummary.updatedDsCount
-                    + " data set(s) were updated.\n"
-                    + notRegisteredDataSetCodes.size()
-                    + " data set(s) FAILED to register.\n"
+            operationLog.info("Data set synchronization summary:\n"
+                    + dsRegistrationSummary.addedDsCount + " data set(s) were added.\n"
+                    + dsRegistrationSummary.updatedDsCount + " data set(s) were updated.\n"
+                    + (notRegisteredDataSetCodes.isEmpty() ? ""
+                            : notRegisteredDataSetCodes.size()
+                                    + " data set(s) FAILED to register.\n")
                     + blackListedDataSetCodes.size() + " data set(s)"
                     + " were skipped because they were BLACK-LISTED.");
         }
diff --git a/integration-tests/source/systemtest/testcase.py b/integration-tests/source/systemtest/testcase.py
index 98057ed32855f9c67840bcbaddf79fe05bc985e7..7412d672e7e567a375dda8051a7a2e1189103e85 100644
--- a/integration-tests/source/systemtest/testcase.py
+++ b/integration-tests/source/systemtest/testcase.py
@@ -696,13 +696,18 @@ class OpenbisController(_Controller):
         destination = "%s/%s" % (corePluginsFolder, self.instanceName)
         shutil.rmtree(destination, ignore_errors=True)
         shutil.copytree("%s/core-plugins/%s" % (self.templatesFolder, self.instanceName), destination)
+        self.enableCorePlugin(self.instanceName)
+        
+    def enableCorePlugin(self, pluginName):
+        corePluginsFolder = "%s/servers/core-plugins" % self.installPath
         corePluginsPropertiesFile = "%s/core-plugins.properties" % corePluginsFolder
         corePluginsProperties = util.readProperties(corePluginsPropertiesFile)
         enabledModules = corePluginsProperties['enabled-modules']
-        enabledModules = "%s, %s" % (enabledModules, self.instanceName) if len(enabledModules) > 0 else self.instanceName
+        enabledModules = "%s, %s" % (enabledModules, pluginName) if len(enabledModules) > 0 else pluginName
         corePluginsProperties['enabled-modules'] = enabledModules
         util.writeProperties(corePluginsPropertiesFile, corePluginsProperties)
         
+        
     def _setUpStore(self):
         templateStore = "%s/stores/%s" % (self.templatesFolder, self.instanceName)
         if os.path.isdir(templateStore):
diff --git a/integration-tests/test_openbis_sync.py b/integration-tests/test_openbis_sync.py
index 834850e86a25c902f06abc512e85a8f81cfb0eba..c3426a623a3a82882348fa1a5e0fd0e9139ef6a1 100755
--- a/integration-tests/test_openbis_sync.py
+++ b/integration-tests/test_openbis_sync.py
@@ -53,6 +53,7 @@ class TestCase(systemtest.testcase.TestCase):
         openbis1.createTestDatabase('openbis')
         openbis1.createTestDatabase('pathinfo')
         openbis1.createTestDatabase('imaging')
+        openbis1.enableCorePlugin("openbis-sync")
 
         '''Copy master data script'''
         filePath = "%s/servers/core-plugins/%s/1/as" % (openbis1.installPath, openbis1.instanceName)
@@ -80,6 +81,7 @@ class TestCase(systemtest.testcase.TestCase):
         openbis2.createTestDatabase('pathinfo')
         openbis2.createTestDatabase('imaging')
         openbis2.createTestDatabase('proteomics')
+        openbis2.enableCorePlugin("openbis-sync")
         
         '''set openbis2 as harvester'''
         source = self.getHarvesterConfigFolder()
@@ -293,7 +295,7 @@ class TestCase(systemtest.testcase.TestCase):
         base64string = base64.encodestring('%s:%s' % (user, password)).replace('\n', '')
         request.add_header("Authorization", "Basic %s" % base64string)
         data = urllib.urlencode({'mode' : 'test'})
-        response = urllib2.urlopen(request, data)
+        response = urllib2.urlopen(request, data, context=ssl._create_unverified_context())
         return response
 
     def getHarvesterConfigFolder(self):
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/initialize-master-data.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/initialize-master-data.py
index f5e98d86cee130cd9075b0874cbdc4c725772d47..d6e509205474afe3602829eedaca579aeb2eeee9 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/initialize-master-data.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/initialize-master-data.py
@@ -21,7 +21,7 @@ from ch.ethz.sis.openbis.generic.asapi.v3.dto.operation import SynchronousOperat
 from ch.ethz.sis.openbis.generic.server.asapi.v3 import ApplicationServerApi
 from ch.systemsx.cisd.openbis.generic.server import CommonServiceProvider
 from parsers import ExcelToPoiParser, PoiToDefinitionParser, DefinitionToCreationParser, DuplicatesHandler, CreationToOperationParser
-from processors import OpenbisDuplicatesHandler
+from processors import OpenbisDuplicatesHandler, VocabularyLabelHandler
 from search_engines import SearchEngine
 from utils.file_handling import list_xls_files
 
@@ -44,8 +44,8 @@ existing_elements = search_engine.find_all_existing_elements(distinct_creations)
 server_duplicates_handler = OpenbisDuplicatesHandler(distinct_creations, existing_elements)
 creations = server_duplicates_handler.remove_existing_elements_from_creations()
 creations = server_duplicates_handler.rewrite_parentchild_creationid_to_permid()
-# creations = server_duplicates_handler.rewrite_vocabulary_labels()
-
+entity_types = search_engine.find_existing_vocabularies_in_entity_definitions(creations)
+creations = VocabularyLabelHandler.rewrite_vocabularies(creations, entity_types)
 operations = CreationToOperationParser.parse(creations)
 result = api.executeOperations(sessionToken, operations, SynchronousOperationExecutionOptions())
 print("========================eln-life-sciences-types xls ingestion result========================")
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/__init__.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/__init__.py
index 870975eb874cee348b26c457931b32da2f06a970..81cd7918dca6e5663109e635fa7178183f3dbccf 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/__init__.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/__init__.py
@@ -1 +1,2 @@
-from .openbis_duplicates_processor import OpenbisDuplicatesHandler
\ No newline at end of file
+from .openbis_duplicates_processor import OpenbisDuplicatesHandler
+from .vocabulary_label_handler import VocabularyLabelHandler
\ No newline at end of file
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/openbis_duplicates_processor.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/openbis_duplicates_processor.py
index 8b0c3077a0cd6107d972e5428ddd31c6029c17ba..48eff4332708c785220a9e969e8d09bd2ee6081a 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/openbis_duplicates_processor.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/openbis_duplicates_processor.py
@@ -1,11 +1,9 @@
-from ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id import ExperimentPermId
-from ch.ethz.sis.openbis.generic.asapi.v3.dto.project.id import ProjectPermId
-from ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id import SamplePermId, SampleIdentifier
-from ch.ethz.sis.openbis.generic.asapi.v3.dto.space.id import SpacePermId
 from parsers import VocabularyDefinitionToCreationParser, PropertyTypeDefinitionToCreationParser, SampleTypeDefinitionToCreationParser, \
                     ExperimentTypeDefinitionToCreationParser, DatasetTypeDefinitionToCreationParser, SpaceDefinitionToCreationParser, \
                     ProjectDefinitionToCreationParser, ExperimentDefinitionToCreationParser, ScriptDefinitionToCreationParser, SampleDefinitionToCreationParser
 
+from utils.openbis_utils import create_sample_identifier_string
+
 
 class OpenbisDuplicatesHandler(object):
 
@@ -18,30 +16,30 @@ class OpenbisDuplicatesHandler(object):
             for creation in self.creations[ProjectDefinitionToCreationParser.type]:
                 for existing_element in self.existing_elements[SpaceDefinitionToCreationParser.type]:
                     if existing_element.code == creation.spaceId.creationId:
-                        creation.spaceId = SpacePermId(str(existing_element.permId))
+                        creation.spaceId = existing_element.permId
                         break
         if ExperimentDefinitionToCreationParser.type in self.creations:
             for creation in self.creations[ExperimentDefinitionToCreationParser.type]:
                 for existing_element in self.existing_elements[ProjectDefinitionToCreationParser.type]:
                     if existing_element.code == creation.projectId.creationId:
-                        creation.projectId = ProjectPermId(str(existing_element.permId))
+                        creation.projectId = existing_element.permId
                         break
         if SampleDefinitionToCreationParser.type in self.creations:
             for creation in self.creations[SampleDefinitionToCreationParser.type]:
                 if creation.spaceId is not None:
                     for existing_element in self.existing_elements[SpaceDefinitionToCreationParser.type]:
                         if existing_element.code == creation.spaceId.creationId:
-                            creation.spaceId = SpacePermId(str(existing_element.permId))
+                            creation.spaceId = existing_element.permId
                             break
                 if creation.projectId is not None:
                     for existing_element in self.existing_elements[ProjectDefinitionToCreationParser.type]:
                         if existing_element.code == creation.projectId.creationId:
-                            creation.projectId = ProjectPermId(str(existing_element.permId))
+                            creation.projectId = existing_element.permId
                             break
                 if creation.experimentId is not None:
                     for existing_element in self.existing_elements[ExperimentDefinitionToCreationParser.type]:
                         if existing_element.code == creation.experimentId.creationId:
-                            creation.experimentId = ExperimentPermId(str(existing_element.permId))
+                            creation.experimentId = existing_element.permId
                             break
 
                 rewritten_children = []
@@ -86,7 +84,7 @@ class OpenbisDuplicatesHandler(object):
         for creations_type, existing_elements in self.existing_elements.items():
             if creations_type == SampleDefinitionToCreationParser.type:
                 existing_object_codes = [object.identifier.identifier for object in existing_elements]
-                self.creations[creations_type] = list(filter(lambda creation: creation.code is None or self._create_sample_identifier_string(creation) not in existing_object_codes, self.creations[creations_type]))
+                self.creations[creations_type] = list(filter(lambda creation: creation.code is None or create_sample_identifier_string(creation) not in existing_object_codes, self.creations[creations_type]))
             else:
                 distinct_property_name = self._get_distinct_property_name(creations_type)
                 self.creations[creations_type] = self._filter_creations_from_existing_objects(creations_type, existing_elements, distinct_property_name)
@@ -98,13 +96,6 @@ class OpenbisDuplicatesHandler(object):
         else:
             return 'code'
 
-    def _create_sample_identifier_string(self, creation):
-        spaceId = creation.spaceId.creationId if creation.spaceId is not None else None
-        projectId = creation.projectId.creationId if creation.projectId is not None else None
-        code = creation.code
-        sample_identifier = SampleIdentifier(spaceId, projectId, None, code)
-        return sample_identifier.identifier
-
     def _filter_creations_from_existing_objects(self, creations_type, existing_objects, attr):
         existing_object_codes = [getattr(object, attr) for object in existing_objects]
         return list(filter(lambda creation: getattr(creation, attr) not in existing_object_codes, self.creations[creations_type]))
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/vocabulary_label_handler.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/vocabulary_label_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d3721fc2cc3666e5387f6494a1df9a5a2906290
--- /dev/null
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/processors/vocabulary_label_handler.py
@@ -0,0 +1,31 @@
+from ch.ethz.sis.openbis.generic.asapi.v3.dto.property import DataType
+
+
+class VocabularyLabelHandler(object):
+
+    @staticmethod
+    def rewrite_vocabularies(creations_map, entity_types):
+        for creations_type, creations in creations_map.iteritems():
+            if creations_type in entity_types:
+                matching_entity_types = entity_types[creations_type]
+                for creation in creations:
+                    for matching_entity_type in matching_entity_types:
+                        if creation.typeId.permId == matching_entity_type.permId.permId:
+                            vocabulary_property_types = VocabularyLabelHandler.extract_vocabulary_from_entity_type(matching_entity_type)
+                            for vocabulary_property_type in vocabulary_property_types:
+                                if vocabulary_property_type.code.lower() in creation.properties:
+                                    vocabulary_label = creation.properties[vocabulary_property_type.code.lower()]
+                                    if vocabulary_label is not None:
+                                        for term in vocabulary_property_type.vocabulary.terms:
+                                            if vocabulary_label.lower() == term.label.lower():
+                                                creation.properties[vocabulary_property_type.code.lower()] = term.code
+                                                break
+        return creations_map
+
+    @staticmethod
+    def extract_vocabulary_from_entity_type(matching_entity_type):
+        vocabularies = []
+        for property_assignment in matching_entity_type.propertyAssignments:
+            if property_assignment.propertyType.dataType == DataType.CONTROLLEDVOCABULARY:
+                vocabularies.append(property_assignment.propertyType)
+        return vocabularies
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_criteria_factory.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_criteria_factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..103a8066763cb35c9c76767ff835e6c1bd94d383
--- /dev/null
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_criteria_factory.py
@@ -0,0 +1,67 @@
+class SampleCreationSampleSearchCriteria(object):
+
+    def __init__(self, search_criteria_class):
+        self.search_criteria_class = search_criteria_class
+
+    def get_search_criteria(self, specific_creations):
+        search_criterias = []
+        for creation in specific_creations:
+            search_criteria = self.search_criteria_class()
+            search_criteria.withAndOperator()
+            if creation.code is not None:
+                search_criteria.withCode().thatEquals(creation.code)
+                if creation.experimentId is not None:
+                    search_criteria.withExperiment().withCode().thatEquals(creation.experimentId.creationId)
+                else:
+                    search_criteria.withoutExperiment()
+
+                if creation.projectId is not None:
+                    search_criteria.withProject().withCode().thatEquals(creation.projectId.creationId)
+                else:
+                    search_criteria.withoutProject()
+
+                if creation.spaceId is not None:
+                    search_criteria.withSpace().withCode().thatEquals(creation.spaceId.creationId)
+                else:
+                    search_criteria.withoutSpace()
+
+                search_criterias.append(search_criteria)
+
+        return search_criterias
+
+
+class DefaultCreationElementSearchCriteria(object):
+
+    def __init__(self, search_criteria_class):
+        self.search_criteria = search_criteria_class()
+
+    def get_search_criteria(self, specific_creations):
+        if 'withCodes' in dir(self.search_criteria):
+            self.search_criteria.withCodes().thatIn([creation.code for creation in specific_creations])
+        else:
+            for creation in specific_creations:
+                self.search_criteria.withCode().thatEquals(creation.code)
+            self.search_criteria.withOrOperator()
+        return self.search_criteria
+
+
+class ScriptCreationScriptSearchCriteria(object):
+
+    def __init__(self, search_criteria_class):
+        self.search_criteria = search_criteria_class()
+
+    def get_search_criteria(self, specific_creations):
+        for creation in specific_creations:
+            self.search_criteria.withName().thatEquals(creation.name)
+        self.search_criteria.withOrOperator()
+        return self.search_criteria
+
+
+class EntityCreationEntityTypeSearchCriteria(object):
+
+    def __init__(self, search_criteria_class):
+        self.search_criteria = search_criteria_class()
+
+    def get_search_criteria(self, specific_creations):
+        self.search_criteria.withCodes().thatIn([creation.typeId.permId for creation in specific_creations])
+        return self.search_criteria
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_engine.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_engine.py
index 42634e1ef274b9b68f57a1c759bb91944a021527..bce14cb6e910836254081def08d273f48cef4842 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_engine.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/search_engines/search_engine.py
@@ -22,6 +22,7 @@ from ch.ethz.sis.openbis.generic.asapi.v3.dto.vocabulary.search import Vocabular
 from parsers import VocabularyDefinitionToCreationParser, PropertyTypeDefinitionToCreationParser, SampleTypeDefinitionToCreationParser, \
                     ExperimentTypeDefinitionToCreationParser, DatasetTypeDefinitionToCreationParser, SpaceDefinitionToCreationParser, \
                     ProjectDefinitionToCreationParser, ExperimentDefinitionToCreationParser, ScriptDefinitionToCreationParser, SampleDefinitionToCreationParser
+from search_criteria_factory import DefaultCreationElementSearchCriteria, SampleCreationSampleSearchCriteria, ScriptCreationScriptSearchCriteria, EntityCreationEntityTypeSearchCriteria
 
 
 class SearchEngine():
@@ -30,127 +31,141 @@ class SearchEngine():
             self.api = api
             self.session_token = sesstion_token
 
+    def find_existing_vocabularies_in_entity_definitions(self, creations):
+        experiment_fetch_options = ExperimentTypeFetchOptions()
+        experiment_fetch_options.withPropertyAssignments().withPropertyType().withVocabulary().withTerms()
+        sample_fetch_options = SampleTypeFetchOptions()
+        sample_fetch_options.withPropertyAssignments().withPropertyType().withVocabulary().withTerms()
+
+        search_strategy = [
+            {
+            'creations_type': SampleDefinitionToCreationParser.type,
+            'search_criteria_build_strategy' : EntityCreationEntityTypeSearchCriteria,
+            'search_criteria_class' : SampleTypeSearchCriteria,
+            'search_operation': SearchSampleTypesOperation,
+            'fetch_options': sample_fetch_options
+            },
+            {
+            'creations_type': ExperimentDefinitionToCreationParser.type,
+            'search_criteria_build_strategy' : EntityCreationEntityTypeSearchCriteria,
+            'search_criteria_class' : ExperimentTypeSearchCriteria,
+            'search_operation': SearchExperimentTypesOperation,
+            'fetch_options': experiment_fetch_options
+            }
+        ]
+
+        existing_elements = {}
+        for strategy in search_strategy:
+            creations_type = strategy['creations_type']
+            search_criteria_class = strategy['search_criteria_class']
+            search_criteria_builder = strategy['search_criteria_build_strategy'](search_criteria_class)
+            if creations_type in creations:
+                search_criterias = search_criteria_builder.get_search_criteria(creations[creations_type])
+                existing_specific_elements = self._get_existing_elements(search_criterias, **strategy)
+                if existing_specific_elements is not None:
+                    existing_elements[creations_type] = existing_specific_elements
+
+        return existing_elements
+
     def find_all_existing_elements(self, creations):
         search_strategy = [
             {
             'creations_type': VocabularyDefinitionToCreationParser.type,
-            'search_criteria_object' : VocabularySearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : VocabularySearchCriteria,
             'search_operation':SearchVocabulariesOperation,
-            'fetch_options':VocabularyFetchOptions
+            'fetch_options':VocabularyFetchOptions()
             },
             {
             'creations_type': PropertyTypeDefinitionToCreationParser.type,
-            'search_criteria_object' : PropertyTypeSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : PropertyTypeSearchCriteria,
             'search_operation': SearchPropertyTypesOperation,
-            'fetch_options': PropertyTypeFetchOptions
+            'fetch_options': PropertyTypeFetchOptions()
             },
             {
             'creations_type': SampleTypeDefinitionToCreationParser.type,
-            'search_criteria_object' : SampleTypeSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : SampleTypeSearchCriteria,
             'search_operation': SearchSampleTypesOperation,
-            'fetch_options': SampleTypeFetchOptions
+            'fetch_options': SampleTypeFetchOptions()
             },
             {
             'creations_type': ExperimentTypeDefinitionToCreationParser.type,
-            'search_criteria_object' : ExperimentTypeSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : ExperimentTypeSearchCriteria,
             'search_operation': SearchExperimentTypesOperation,
-            'fetch_options': ExperimentTypeFetchOptions
+            'fetch_options': ExperimentTypeFetchOptions()
             },
             {
             'creations_type': DatasetTypeDefinitionToCreationParser.type,
-            'search_criteria_object' : DataSetTypeSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : DataSetTypeSearchCriteria,
             'search_operation': SearchDataSetTypesOperation,
-            'fetch_options': DataSetTypeFetchOptions
+            'fetch_options': DataSetTypeFetchOptions()
             },
             {
             'creations_type': SpaceDefinitionToCreationParser.type,
-            'search_criteria_object' : SpaceSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : SpaceSearchCriteria,
             'search_operation': SearchSpacesOperation,
-            'fetch_options': SpaceFetchOptions
+            'fetch_options': SpaceFetchOptions()
             },
             {
             'creations_type': ProjectDefinitionToCreationParser.type,
-            'search_criteria_object' : ProjectSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : ProjectSearchCriteria,
             'search_operation': SearchProjectsOperation,
-            'fetch_options': ProjectFetchOptions
+            'fetch_options': ProjectFetchOptions()
             },
             {
             'creations_type': ExperimentDefinitionToCreationParser.type,
-            'search_criteria_object' : ExperimentSearchCriteria,
+            'search_criteria_build_strategy' : DefaultCreationElementSearchCriteria,
+            'search_criteria_class' : ExperimentSearchCriteria,
             'search_operation': SearchExperimentsOperation,
-            'fetch_options': ExperimentFetchOptions
+            'fetch_options': ExperimentFetchOptions()
             },
             {
             'creations_type': SampleDefinitionToCreationParser.type,
-            'search_criteria_object' : SampleSearchCriteria,
+            'search_criteria_build_strategy' : SampleCreationSampleSearchCriteria,
+            'search_criteria_class' : SampleSearchCriteria,
             'search_operation': SearchSamplesOperation,
-            'fetch_options': SampleFetchOptions
+            'fetch_options': SampleFetchOptions()
             },
             {
             'creations_type': ScriptDefinitionToCreationParser.type,
-            'search_criteria_object' : PluginSearchCriteria,
+            'search_criteria_build_strategy' : ScriptCreationScriptSearchCriteria,
+            'search_criteria_class': PluginSearchCriteria,
             'search_operation': SearchPluginsOperation,
-            'fetch_options': PluginFetchOptions
+            'fetch_options': PluginFetchOptions()
             }
         ]
 
         existing_elements = {}
         for strategy in search_strategy:
             creations_type = strategy['creations_type']
+            search_criteria_class = strategy['search_criteria_class']
+            search_criteria_builder = strategy['search_criteria_build_strategy'](search_criteria_class)
             if creations_type in creations:
-                existing_specific_elements = self._get_existing_elements(creations=creations, **strategy)
+                search_criterias = search_criteria_builder.get_search_criteria(creations[creations_type])
+                existing_specific_elements = self._get_existing_elements(search_criterias, **strategy)
                 if existing_specific_elements is not None:
                     existing_elements[creations_type] = existing_specific_elements
         return existing_elements
 
-    def _get_existing_elements(self, creations, creations_type, search_criteria_object, search_operation, fetch_options):
-        search_criteria = self._get_search_criteria(creations_type, creations[creations_type], search_criteria_object)
-        if search_criteria is []:
+    def _get_existing_elements(self, search_criterias, **kwargs):
+        search_operation = kwargs['search_operation']
+        fetch_options = kwargs['fetch_options']
+        if not search_criterias:
             return None
-        result = self._execute_search_operation(search_operation(search_criteria, fetch_options()))
-        return result.getObjects()
-
-    def _get_search_criteria(self, creations_type, specific_creations, search_criteria_class):
-        search_criteria = search_criteria_class()
-
-        if creations_type == SampleDefinitionToCreationParser.type:
-            search_criterias = []
-            for creation in specific_creations:
-                search_criteria.withOrOperator()
-                if creation.code is not None:
-                    search_criteria.withCode().thatEquals(creation.code)
-                    if creation.experimentId is not None:
-                        search_criteria.withExperiment().withCode().thatEquals(creation.experimentId.creationId)
-                    else:
-                        search_criteria.withoutExperiment()
-
-                    if creation.projectId is not None:
-                        search_criteria.withProject().withCode().thatEquals(creation.projectId.creationId)
-                    else:
-                        search_criteria.withoutProject()
-
-                    if creation.spaceId is not None:
-                        search_criteria.withSpace().withCode().thatEquals(creation.spaceId.creationId)
-                    else:
-                        search_criteria.withoutSpace()
-
-                    search_criterias.append(search_criteria)
-            return search_criteria
-
-        if 'withCodes' in dir(search_criteria):
-            search_criteria.withCodes().thatIn([creation.code for creation in specific_creations])
-        elif 'withName' in dir(search_criteria):
-            for creation in specific_creations:
-                search_criteria.withName().thatEquals(creation.name)
-            search_criteria.withOrOperator()
-        else:
-            for creation in specific_creations:
-                search_criteria.withCode().thatEquals(creation.code)
-            search_criteria.withOrOperator()
-        return search_criteria
+        search_criterias = search_criterias if type(search_criterias) == list else [search_criterias]
+        operations = [search_operation(search_criteria, fetch_options) for search_criteria in search_criterias]
+        return self._execute_search_operation(operations)
 
-    def _execute_search_operation(self, operation):
-        operations = []
-        operations.extend(operation if type(operation) == list else [operation])
-        return self.api.executeOperations(self.session_token, operations, SynchronousOperationExecutionOptions()).getResults().get(0).getSearchResult()
+    def _execute_search_operation(self, operations):
+        execution_results = self.api.executeOperations(self.session_token, operations, SynchronousOperationExecutionOptions())
+        result_objects = []
+        for search_result in execution_results.getResults():
+            result_objects.extend(search_result.getSearchResult().getObjects())
+        return result_objects
 
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/test_files/types.xls b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/test_files/types.xls
index c2d224eb2ae234f8e00d673d04f887f8cc7922ea..234d3ed935ae7cdbcc78446c9db21dd74a96d587 100644
Binary files a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/test_files/types.xls and b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/test_files/types.xls differ
diff --git a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/utils/openbis_utils.py b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/utils/openbis_utils.py
index 994918600bda0c3678fd20b93c490cd5a9e55ed2..6f55862235908aafe39b67b772ea52fbeac74f1e 100644
--- a/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/utils/openbis_utils.py
+++ b/openbis_standard_technologies/dist/core-plugins/eln-lims-life-sciences/1/as/utils/openbis_utils.py
@@ -1,3 +1,4 @@
+from ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id import SampleIdentifier
 from utils.file_handling import get_filename_from_path
 
 
@@ -7,3 +8,11 @@ def is_internal_namespace(property_value):
 
 def get_script_name_for(owner_code, script_path):
         return owner_code + '.' + get_filename_from_path(script_path)
+
+
+def create_sample_identifier_string(sample_creation):
+        spaceId = sample_creation.spaceId.creationId if sample_creation.spaceId is not None else None
+        projectId = sample_creation.projectId.creationId if sample_creation.projectId is not None else None
+        code = sample_creation.code
+        sample_identifier = SampleIdentifier(spaceId, projectId, None, code)
+        return sample_identifier.identifier