From 359e6dc9536423998484a6a539b206edb11674b2 Mon Sep 17 00:00:00 2001
From: gakin <gakin>
Date: Wed, 11 Jan 2017 09:43:02 +0000
Subject: [PATCH] SSDM-4591 : Re-organize OpenbisSync related classes to make
 it easier for build scripts

SVN: 37582
---
 .../plugins/sync/common}/EntityRetriever.java |    2 +-
 .../sync/common/ServiceFinderUtils.java       |   46 +
 .../ResourceSyncRequestHandler.java           |   48 +
 .../harvester/HarvesterMaintenanceTask.java   |  265 ++++
 .../config/BasicAuthCredentials.java          |   68 +
 .../sync/harvester/config/ConfigReader.java   |  196 +++
 .../sync/harvester/config/SyncConfig.java     |  235 ++++
 .../config/SynchronizationConfigReader.java   |  167 +++
 .../harvester/synchronizer/DSSFileUtils.java  |   80 ++
 .../DataSetRegistrationIngestionService.java  |  231 ++++
 .../synchronizer/EntitySynchronizer.java      | 1102 +++++++++++++++++
 .../synchronizer/MasterDataParser.java        |  209 ++++
 .../synchronizer/ResourceListParser.java      |  519 ++++++++
 .../synchronizer/ResourceListParserData.java  |  356 ++++++
 .../DataSourceConnector.java                  |  117 ++
 .../IDataSourceConnector.java                 |   31 +
 .../translator/CustomNameTranslator.java      |   53 +
 .../translator/DefaultNameTranslator.java     |   32 +
 .../translator/INameTranslator.java           |   27 +
 .../translator/PrefixBasedNameTranslator.java |   39 +
 20 files changed, 3822 insertions(+), 1 deletion(-)
 rename {openbis/source/java/ch/ethz/sis/openbis/generic/server => datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common}/EntityRetriever.java (99%)
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/ServiceFinderUtils.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/datasource/ResourceSyncRequestHandler.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/BasicAuthCredentials.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/ConfigReader.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SyncConfig.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DSSFileUtils.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DataSetRegistrationIngestionService.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/MasterDataParser.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParser.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParserData.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/DataSourceConnector.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/IDataSourceConnector.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/CustomNameTranslator.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/DefaultNameTranslator.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/INameTranslator.java
 create mode 100644 datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/PrefixBasedNameTranslator.java

diff --git a/openbis/source/java/ch/ethz/sis/openbis/generic/server/EntityRetriever.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/EntityRetriever.java
similarity index 99%
rename from openbis/source/java/ch/ethz/sis/openbis/generic/server/EntityRetriever.java
rename to datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/EntityRetriever.java
index cb9936d0c4d..35a5e2431b3 100644
--- a/openbis/source/java/ch/ethz/sis/openbis/generic/server/EntityRetriever.java
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/EntityRetriever.java
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package ch.ethz.sis.openbis.generic.server;
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common;
 
 import static ch.ethz.sis.openbis.generic.shared.entitygraph.Edge.CHILD;
 import static ch.ethz.sis.openbis.generic.shared.entitygraph.Edge.COMPONENT;
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/ServiceFinderUtils.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/ServiceFinderUtils.java
new file mode 100644
index 00000000000..42f3ea3038d
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/common/ServiceFinderUtils.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common;
+
+import ch.systemsx.cisd.openbis.common.api.client.IServicePinger;
+import ch.systemsx.cisd.openbis.common.api.client.ServiceFinder;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.EncapsulatedCommonServer;
+import ch.systemsx.cisd.openbis.generic.shared.ICommonServer;
+
+/**
+ * Utility methods for locating the openBIS common server service and wrapping it for a session.
+ *
+ * @author Ganime Betul Akin
+ */
+public class ServiceFinderUtils
+{
+    public static EncapsulatedCommonServer getEncapsulatedCommonServer(String sessionToken, String openBisServerUrl)
+    {
+        ServiceFinder finder = new ServiceFinder("openbis", "/rmi-common");
+        ICommonServer commonServer =
+                finder.createService(ICommonServer.class, openBisServerUrl,
+                        new IServicePinger<ICommonServer>()
+                            {
+                                @Override
+                                public void ping(ICommonServer service)
+                                {
+                                    service.getVersion();
+                                }
+                            });
+        return EncapsulatedCommonServer.create(commonServer, sessionToken);
+    }
+}
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/datasource/ResourceSyncRequestHandler.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/datasource/ResourceSyncRequestHandler.java
new file mode 100644
index 00000000000..e9061c66167
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/datasource/ResourceSyncRequestHandler.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.datasource;
+
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.EntityRetriever;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.ServiceFinderUtils;
+import ch.systemsx.cisd.openbis.dss.generic.server.oaipmh.JythonBasedRequestHandler;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.jython.IRequestHandlerPluginScriptRunner;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IMasterDataRegistrationTransaction;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.EncapsulatedCommonServer;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.MasterDataRegistrationService;
+import ch.systemsx.cisd.openbis.generic.shared.dto.SessionContextDTO;
+
+/**
+ * @author Ganime Betul Akin
+ */
+public class ResourceSyncRequestHandler extends JythonBasedRequestHandler
+{
+    private static final String V3_ENTITY_RETRIEVER_VARIABLE_NAME = "v3EntityRetriever";
+
+    @Override
+    protected void setVariables(IRequestHandlerPluginScriptRunner runner, SessionContextDTO session)
+    {
+        super.setVariables(runner, session);
+        String openBisServerUrl = ServiceProvider.getConfigProvider().getOpenBisServerUrl();
+        EncapsulatedCommonServer encapsulatedServer = ServiceFinderUtils.getEncapsulatedCommonServer(session.getSessionToken(), openBisServerUrl);
+        MasterDataRegistrationService service = new MasterDataRegistrationService(encapsulatedServer);
+        IMasterDataRegistrationTransaction masterDataRegistrationTransaction = service.transaction();
+
+        runner.setVariable(V3_ENTITY_RETRIEVER_VARIABLE_NAME,
+                EntityRetriever.createWithMasterDataRegistationTransaction(ServiceProvider.getV3ApplicationService(), session.getSessionToken(),
+                        masterDataRegistrationTransaction));
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
new file mode 100644
index 00000000000..c773c4ec5a0
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/HarvesterMaintenanceTask.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//TODO should we use permId in hash tables instead of identifier, for example in samplesToUpdate
+//TODO try to implement sample relationship sync like DS rel. sync
+//TODO check if already loaded harvesterEntityGraph can be used in most cases
+//TODO check if harvesterEntityGraph can be partially loaded as required
+//TODO correctly handle saving of last sync timestamp
+//TODO different last sync timestamp files for different plugins - 
+//this is actually handled by setting up different harvester plugins with different files
+//TODO when deleting make sure we are not emptying all the trash but just the ones we synchronized
+//TODO checksum checks for data set files
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.Set;
+
+import javax.activation.DataHandler;
+import javax.activation.DataSource;
+import javax.mail.util.ByteArrayDataSource;
+
+import org.apache.log4j.Logger;
+
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.SyncConfig;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.SynchronizationConfigReader;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.EntitySynchronizer;
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.mail.EMailAddress;
+import ch.systemsx.cisd.common.mail.IMailClient;
+import ch.systemsx.cisd.common.maintenance.IMaintenanceTask;
+import ch.systemsx.cisd.common.parser.ILine;
+import ch.systemsx.cisd.common.parser.filter.ExcludeEmptyAndCommentLineFilter;
+import ch.systemsx.cisd.common.parser.filter.ILineFilter;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.tasks.PluginTaskInfoProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetProcessingContext;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IConfigProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
+import ch.systemsx.cisd.openbis.dss.generic.shared.utils.DssPropertyParametersUtil;
+
+/**
+ * @author Ganime Betul Akin
+ */
+public class HarvesterMaintenanceTask<T extends DataSetInformation> implements IMaintenanceTask
+{
+    protected static final Logger operationLog =
+            LogFactory.getLogger(LogCategory.OPERATION, HarvesterMaintenanceTask.class);
+
+    final DateFormat formatter = new SimpleDateFormat("dd-MM-yy HH-mm-ss", Locale.ENGLISH);
+
+    private static final String HARVESTER_CONFIG_FILE_PROPERTY_NAME = "harvester-config-file";
+
+    private static final String DEFAULT_HARVESTER_CONFIG_FILE_NAME = "../../harvester-config.txt";
+
+    private File storeRoot;
+
+    private IEncapsulatedOpenBISService service;
+
+    private DataSetProcessingContext context;
+
+    private Date lastSyncTimestamp;
+
+    private File harvesterConfigFile;
+
+    private IMailClient mailClient;
+
+    private String dataStoreCode;
+
+    @Override
+    public void setUp(String pluginName, Properties properties)
+    {
+        service = ServiceProvider.getOpenBISService();
+        context = new DataSetProcessingContext(null, null, null, null, null, null);
+        dataStoreCode = getConfigProvider().getDataStoreCode();
+        storeRoot = new File(DssPropertyParametersUtil.loadServiceProperties().getProperty(PluginTaskInfoProvider.STOREROOT_DIR_KEY));
+        mailClient = ServiceProvider.getDataStoreService().createEMailClient();
+
+
+        String configFileProperty = properties.getProperty(HARVESTER_CONFIG_FILE_PROPERTY_NAME);
+        if (configFileProperty == null)
+        {
+            harvesterConfigFile =
+                    new File(getConfigProvider().getStoreRoot(), DEFAULT_HARVESTER_CONFIG_FILE_NAME);
+        } else
+        {
+            harvesterConfigFile = new File(configFileProperty);
+        }
+    }
+
+    private IConfigProvider getConfigProvider()
+    {
+        return ServiceProvider.getConfigProvider();
+    }
+
+    @Override
+    public void execute()
+    {
+        operationLog.info(this.getClass() + " started.");
+
+        SynchronizationConfigReader syncConfigReader = new SynchronizationConfigReader();
+        List<SyncConfig> configs;
+        try
+        {
+            configs = syncConfigReader.readConfiguration(harvesterConfigFile, operationLog);
+        } catch (Exception e)
+        {
+            operationLog.error("", e);
+            return;
+        }
+
+        for (SyncConfig config : configs)
+        {
+            try
+            {
+                operationLog
+                        .info("Start synchronization from data source: " + config.getDataSourceOpenbisURL() + " for user " + config.getUser());
+
+                String fileName = config.getLastSyncTimestampFileName();
+                File lastSyncTimestampFile = new File(fileName);
+                lastSyncTimestamp = getLastSyncTimeStamp(lastSyncTimestampFile);
+
+                String notSyncedDataSetsFileName = config.getNotSyncedDataSetsFileName();
+                Set<String> notSyncedDataSetCodes = getNotSyncedDataSetCodes(notSyncedDataSetsFileName);
+                Set<String> blackListedDataSetCodes = getBlackListedDataSetCodes(notSyncedDataSetsFileName);
+
+                // save the current time into a temp file as last sync time
+                File newLastSyncTimeStampFile = new File(fileName + ".new");
+                Date syncStartTimestamp = new Date();
+                FileUtilities.writeToFile(newLastSyncTimeStampFile, formatter.format(syncStartTimestamp));
+
+                EntitySynchronizer synchronizer =
+                        new EntitySynchronizer(service, dataStoreCode, storeRoot, lastSyncTimestamp, notSyncedDataSetCodes, blackListedDataSetCodes,
+                                context, config,
+                                operationLog);
+                Date resourceListTimestamp = synchronizer.syncronizeEntities();
+                if (resourceListTimestamp.before(syncStartTimestamp))
+                {
+                    FileUtilities.writeToFile(newLastSyncTimeStampFile, formatter.format(resourceListTimestamp));
+                }
+
+                operationLog.info("Saving the timestamp of sync start to file");
+                saveSyncTimestamp(newLastSyncTimeStampFile, lastSyncTimestampFile);
+
+                operationLog.info(this.getClass() + " finished executing.");
+
+            } catch (Exception e)
+            {
+                operationLog.error("Sync failed: ", e);
+                sendErrorEmail(config, "Synchronization failed");
+            }
+        }
+    }
+
+    private Date getLastSyncTimeStamp(File lastSyncTimestampFile) throws ParseException
+    {
+        if (lastSyncTimestampFile.exists())
+        {
+            String timeStr = FileUtilities.loadToString(lastSyncTimestampFile).trim();
+            return formatter.parse(timeStr);
+        }
+        else
+        {
+            return new Date(0L);
+        }
+    }
+
+    private Set<String> getDataSetCodesFromNotSyncedDataSetsFile(String fileName, ILineFilter linefilter)
+    {
+        File notSyncedDataSetsFile = new File(fileName);
+        if (notSyncedDataSetsFile.exists())
+        {
+            List<String> list = FileUtilities.loadToStringList(notSyncedDataSetsFile, linefilter);
+            return new LinkedHashSet<String>(list);
+        }
+        else
+        {
+            return new LinkedHashSet<String>();
+        }
+    }
+
+    private Set<String> getNotSyncedDataSetCodes(String fileName)
+    {
+        return getDataSetCodesFromNotSyncedDataSetsFile(fileName, ExcludeEmptyAndCommentLineFilter.INSTANCE);
+    }
+
+    private Set<String> getBlackListedDataSetCodes(String fileName)
+    {
+        return getDataSetCodesFromNotSyncedDataSetsFile(fileName, new ILineFilter()
+            {
+                @Override
+                public <T> boolean acceptLine(ILine<T> line)
+                {
+                    assert line != null : "Unspecified line";
+                    final String trimmed = line.getText().trim();
+                    return trimmed.length() > 0 && trimmed.startsWith("#") == true;
+                }
+            });
+    }
+
+    private void sendErrorEmail(SyncConfig config, String subject)
+    {
+        if (config.getLogFilePath() != null)
+        {
+            // send the operation log as attachment
+            DataSource dataSource = createDataSource(config.getLogFilePath()); // /Users/gakin/Documents/sync.log
+            for (EMailAddress recipient : config.getEmailAddresses())
+            {
+                mailClient.sendEmailMessageWithAttachment(subject,
+                        "See the attached file for details.",
+                        "", new DataHandler(
+                                dataSource), null, null, recipient);
+            }
+        }
+        else
+        {
+            for (EMailAddress recipient : config.getEmailAddresses())
+            {
+                mailClient.sendEmailMessage(subject,
+                        "See the data store server log for details.", null, null, recipient);
+            }
+        }
+    }
+
+    private DataSource createDataSource(final String filePath)
+    {
+        try
+        {
+            return new ByteArrayDataSource(new FileInputStream(filePath), "text/plain");
+        } catch (IOException ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        }
+    }
+
+    private void saveSyncTimestamp(File newLastSyncTimeStampFile, File lastSyncTimestampFile)
+    {
+        newLastSyncTimeStampFile.renameTo(lastSyncTimestampFile);
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/BasicAuthCredentials.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/BasicAuthCredentials.java
new file mode 100644
index 00000000000..db80f534564
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/BasicAuthCredentials.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config;
+
+/**
+ * Holder for HTTP basic-authentication credentials (realm, user, password).
+ *
+ * @author Ganime Betul Akin
+ */
+public class BasicAuthCredentials
+{
+    private String realm;
+
+    private String user;
+
+    private String password;
+
+    public BasicAuthCredentials(String realm, String user, String pass)
+    {
+        this.realm = realm;
+        this.user = user;
+        this.password = pass;
+    }
+
+    public String getRealm()
+    {
+        return realm;
+    }
+
+    public void setRealm(String realm)
+    {
+        this.realm = realm;
+    }
+
+    public String getUser()
+    {
+        return user;
+    }
+
+    public void setUser(String user)
+    {
+        this.user = user;
+    }
+
+    public String getPassword()
+    {
+        return password;
+    }
+
+    public void setPassword(String pass)
+    {
+        this.password = pass;
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/ConfigReader.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/ConfigReader.java
new file mode 100644
index 00000000000..6c8009cceae
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/ConfigReader.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config;
+
+/**
+ * Reader for INI-style configuration files with [section] headers and key=value entries.
+ *
+ * @author Ganime Betul Akin
+ */
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+
+public class ConfigReader
+{
+
+    private static final String IGNORE_LINE_CHAR = "#";
+
+    private Pattern sectionRegex = Pattern.compile("\\s*\\[([^]]*)\\]\\s*");
+
+    private Pattern keyValueRegex = Pattern.compile("\\s*([^=]*)=(.*)");
+
+    private Map<String, Map<String, String>> entries = new LinkedHashMap<>();
+
+    public ConfigReader(String path) throws IOException
+    {
+        load(path);
+    }
+
+    public ConfigReader(File file) throws IOException
+    {
+        loadFile(file);
+    }
+
+    public static void main(String[] args)
+    {
+        ConfigReader reader;
+        try
+        {
+            reader = new ConfigReader("/Users/gakin/Documents/workspace_openbis_trunk/datastore_server/harvester.ini");
+            for (int i = 0; i < reader.getSectionCount(); i++)
+            {
+                System.out.println(reader.getSection(i));
+            }
+            
+        } catch (IOException e)
+        {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+    }
+
+    public int getSectionCount()
+    {
+        return entries.keySet().size();
+    }
+
+    public String getSection(int index)
+    {
+        if (index >= getSectionCount()) // was '>': index == count must also be rejected, else the array access below throws AIOOBE
+        {
+            throw new RuntimeException("Section with index " + index + " does not exist.");
+        }
+        return entries.keySet().toArray(new String[entries.keySet().size()])[index];
+    }
+
+    public boolean sectionExists(String name)
+    {
+        Map<String, String> kvMap = entries.get(name);
+        if (kvMap == null)
+        {
+            return false;
+        }
+        return true;
+    }
+
+    public void loadFile(File file) throws IOException
+    {
+        try (BufferedReader br = new BufferedReader(new FileReader(file)))
+        {
+            String line;
+            String section = null;
+            while ((line = br.readLine()) != null)
+            {
+                Matcher m = sectionRegex.matcher(line);
+                if (m.matches())
+                {
+                    section = m.group(1).trim();
+                }
+                else if (section != null)
+                {
+                    m = keyValueRegex.matcher(line);
+                    if (m.matches() && line.startsWith(IGNORE_LINE_CHAR) == false)
+                    {
+                        String key = m.group(1).trim();
+                        String value = m.group(2).trim();
+                        Map<String, String> map = entries.get(section);
+                        if (map == null)
+                        {
+                            entries.put(section, map = new HashMap<>());
+                        }
+                        map.put(key, value);
+                    }
+                }
+            }
+        }
+    }
+
+    public void load(String path) throws IOException
+    {
+        loadFile(new File(path));
+    }
+
+    public String getString(String section, String key, String defaultvalue, boolean mandatory)
+    {
+        String val = getValue(section, key);
+        if (val == null)
+        {
+            if (mandatory)
+            {
+                throw new ConfigurationFailureException("Property '" + key + "' in section '" + section + "'  is mandatory.");
+            }
+            return defaultvalue;
+        }
+        return val;
+    }
+
+    private String getValue(String section, String key) throws ConfigurationFailureException
+    {
+        Map<String, String> map = entries.get(section);
+        if (map == null)
+        {
+            throw new ConfigurationFailureException("Section '" + section + "' does not exist.");
+        }
+        String val = map.get(key);
+        if (val == null)
+        {
+            return null;
+        }
+        if (val.trim().equals("") == true)
+        {
+            return null;
+        }
+        return val;
+    }
+
+    public int getInt(String section, String key, int defaultvalue, boolean mandatory)
+    {
+        String val = getValue(section, key);
+        if (val == null)
+        {
+            if (mandatory)
+            {
+                throw new ConfigurationFailureException("Property '" + key + "' in section '" + section + "'  is mandatory.");
+            }
+            return defaultvalue;
+        }
+        return Integer.parseInt(val);
+    }
+
+    public double getDouble(String section, String key, double defaultvalue, boolean mandatory)
+    {
+        String val = getValue(section, key);
+        if (val == null)
+        {
+            if (mandatory)
+            {
+                throw new ConfigurationFailureException("Property '" + key + "' in section '" + section + "'  is mandatory.");
+            }
+            return defaultvalue;
+        }
+        return Double.parseDouble(val);
+    }
+}
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SyncConfig.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SyncConfig.java
new file mode 100644
index 00000000000..d50b46594b3
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SyncConfig.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import ch.systemsx.cisd.common.mail.EMailAddress;
+
+/**
+ * Configuration of one data-source-to-harvester synchronization: connection URLs, auth
+ * credentials, space mappings, bookkeeping file names and notification e-mail addresses.
+ *
+ * @author Ganime Betul Akin
+ */
+public class SyncConfig
+{
+    private static final String SEPARATOR = ",";
+
+    // --- connection details of the remote data source -------------------------------------
+    private String dataSourceURI;
+
+    private String dataSourceOpenbisURL;
+
+    private String dataSourceDSSURL;
+
+    private String dataSourceAlias;
+
+    private BasicAuthCredentials auth;
+
+    // --- harvester-side bookkeeping -------------------------------------------------------
+    private String lastSyncTimestampFileName;
+
+    private String notSyncedDataSetsFileName;
+
+    private String harvesterTempDir;
+
+    private String logFilePath;
+
+    private List<EMailAddress> emailAddresses = new ArrayList<>();
+
+    // --- spaces to synchronize and their mapping onto local harvester spaces --------------
+    private List<String> dataSourceSpaces = new ArrayList<>();
+
+    private List<String> harvesterSpaces = new ArrayList<>();
+
+    private HashMap<String, String> spaceMappings = new HashMap<String, String>();
+
+    public String getDataSourceURI()
+    {
+        return dataSourceURI;
+    }
+
+    public void setDataSourceURI(String dataSourceURI)
+    {
+        this.dataSourceURI = dataSourceURI;
+    }
+
+    public String getDataSourceOpenbisURL()
+    {
+        return dataSourceOpenbisURL;
+    }
+
+    public void setDataSourceOpenbisURL(String dataSourceOpenbisURL)
+    {
+        this.dataSourceOpenbisURL = dataSourceOpenbisURL;
+    }
+
+    public String getDataSourceDSSURL()
+    {
+        return dataSourceDSSURL;
+    }
+
+    public void setDataSourceDSSURL(String dataSourceDSSURL)
+    {
+        this.dataSourceDSSURL = dataSourceDSSURL;
+    }
+
+    public String getLastSyncTimestampFileName()
+    {
+        return lastSyncTimestampFileName;
+    }
+
+    public void setLastSyncTimestampFileName(String lastSyncTimestampFileName)
+    {
+        this.lastSyncTimestampFileName = lastSyncTimestampFileName;
+    }
+
+    public String getNotSyncedDataSetsFileName()
+    {
+        return notSyncedDataSetsFileName;
+    }
+
+    public void setNotSyncedDataSetsFileName(String notSyncedDataSetsFileName)
+    {
+        this.notSyncedDataSetsFileName = notSyncedDataSetsFileName;
+    }
+
+    public String getDataSourceAlias()
+    {
+        return dataSourceAlias;
+    }
+
+    public void setDataSourceAlias(String dataSourceAlias)
+    {
+        this.dataSourceAlias = dataSourceAlias;
+    }
+
+    public List<String> getDataSourceSpaces()
+    {
+        return dataSourceSpaces;
+    }
+
+    /** Parses a comma-separated list of data source space codes; a null argument is a no-op. */
+    public void setDataSourceSpaces(String dataSourceSpaces)
+    {
+        if (dataSourceSpaces == null)
+        {
+            return;
+        }
+        for (String token : dataSourceSpaces.split(SEPARATOR))
+        {
+            this.dataSourceSpaces.add(token.trim());
+        }
+    }
+
+    public List<String> getHarvesterSpaces()
+    {
+        return harvesterSpaces;
+    }
+
+    /** Parses a comma-separated list of harvester space codes; a null argument is a no-op. */
+    public void setHarvesterSpaces(String harvesterSpaces)
+    {
+        if (harvesterSpaces == null)
+        {
+            return;
+        }
+        for (String token : harvesterSpaces.split(SEPARATOR))
+        {
+            this.harvesterSpaces.add(token.trim());
+        }
+    }
+
+    public String getHarvesterTempDir()
+    {
+        return harvesterTempDir;
+    }
+
+    public void setHarvesterTempDir(String harvesterTempDir)
+    {
+        this.harvesterTempDir = harvesterTempDir;
+    }
+
+    /** Data-source-space code to harvester-space code mapping, keyed by data source space. */
+    public HashMap<String, String> getSpaceMappings()
+    {
+        return spaceMappings;
+    }
+
+    public String getLogFilePath()
+    {
+        return logFilePath;
+    }
+
+    public void setLogFilePath(String logFilePath)
+    {
+        this.logFilePath = logFilePath;
+    }
+
+    public List<EMailAddress> getEmailAddresses()
+    {
+        return emailAddresses;
+    }
+
+    /**
+     * Parses a comma-separated list of e-mail addresses; a null argument is a no-op
+     * (consistent with the other comma-separated-list setters of this class).
+     */
+    public void setEmailAddresses(String emailAddresses)
+    {
+        if (emailAddresses == null)
+        {
+            return;
+        }
+        for (String token : emailAddresses.split(SEPARATOR))
+        {
+            this.emailAddresses.add(new EMailAddress(token.trim()));
+        }
+    }
+
+    public void setAuthCredentials(String realm, String user, String pass)
+    {
+        this.auth = new BasicAuthCredentials(realm, user, pass);
+    }
+
+    public BasicAuthCredentials getAuthenticationCredentials()
+    {
+        return auth;
+    }
+
+    public String getUser()
+    {
+        return this.auth.getUser();
+    }
+
+    public String getPassword()
+    {
+        return this.auth.getPassword();
+    }
+
+    /** Dumps all configuration fields and their current values to stdout, for diagnostics. */
+    public void printConfig()
+    {
+        for (Field field : this.getClass().getDeclaredFields())
+        {
+            field.setAccessible(true);
+            try
+            {
+                System.out.printf("%s : %s%n", field.getName(), field.get(this));
+            } catch (IllegalArgumentException | IllegalAccessException e)
+            {
+                // Reflection over our own fields should never fail; report and continue
+                // with the remaining fields instead of aborting the dump.
+                e.printStackTrace();
+            }
+        }
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
new file mode 100644
index 00000000000..fc6c9d21432
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/config/SynchronizationConfigReader.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.DailyRollingFileAppender;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Space;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier;
+
+/**
+ * Reads the harvester configuration file (one section per data source) and builds a
+ * {@link SyncConfig} for each section, configuring per-data-source log appenders and
+ * space mappings along the way.
+ *
+ * @author Ganime Betul Akin
+ */
+public class SynchronizationConfigReader
+{
+    private static final String DEFAULT_HARVESTER_TEMP_DIR = "targets/store";
+
+    private static final String DATA_SOURCE_URL_PROPERTY_NAME = "resource-list-url";
+
+    private static final String DATA_SOURCE_OPENBIS_URL_PROPERTY_NAME = "data-source-openbis-url";
+
+    private static final String DATA_SOURCE_DSS_URL_PROPERTY_NAME = "data-source-dss-url";
+
+    private static final String DATA_SOURCE_SPACES_PROPERTY_NAME = "data-source-spaces";
+
+    private static final String DATA_SOURCE_ALIAS_PROPERTY_NAME = "data-source-alias";
+
+    private static final String DATA_SOURCE_AUTH_REALM_PROPERTY_NAME = "data-source-auth-realm";
+
+    private static final String DATA_SOURCE_AUTH_USER_PROPERTY_NAME = "data-source-auth-user";
+
+    private static final String DATA_SOURCE_AUTH_PASS_PROPERTY_NAME = "data-source-auth-pass";
+
+    private static final String HARVESTER_SPACES_PROPERTY_NAME = "harvester-spaces";
+
+    private static final String HARVESTER_TEMP_DIR_PROPERTY_NAME = "harvester-tmp-dir";
+
+    private static final String HARVESTER_LAST_SYNC_TIMESTAMP_FILE_PROPERTY_NAME = "last-sync-timestamp-file";
+
+    private static final String HARVESTER_NOT_SYNCED_DATA_SETS_FILE_NAME = "not-synced-data-sets-file";
+
+    private static final String EMAIL_ADDRESSES_PROPERTY_NAME = "email-addresses";
+
+    private static final String LOG_FILE_PROPERTY_NAME = "log-file";
+
+    // NOTE(review): "syncronization" is misspelled, but the value is kept as-is because
+    // existing installations may rely on this default log file path.
+    private static final String DEFAULT_LOG_FILE_NAME = "../../syncronization.log";
+
+    // Templates for per-data-source bookkeeping file names; "{alias}" is replaced with the
+    // configured data source alias of each section.
+    private static final String DEFAULT_LAST_SYNC_TIMESTAMP_FILE_NAME_TEMPLATE = "last-sync-timestamp-file_{alias}.txt";
+
+    private static final String DEFAULT_NOT_SYNCED_DATA_SETS_FILE_NAME_TEMPLATE = "not-synced-datasets_{alias}.txt";
+
+    List<SyncConfig> configs = new ArrayList<>();
+
+    /**
+     * Parses the given harvester config file and returns one {@link SyncConfig} per section.
+     * Mandatory properties (URLs, alias, credentials, e-mail addresses) cause a
+     * ConfigurationFailureException when missing; optional ones fall back to defaults.
+     */
+    public List<SyncConfig> readConfiguration(File harvesterConfigFile, Logger logger) throws IOException
+    {
+        ConfigReader reader = new ConfigReader(harvesterConfigFile);
+        int sectionCount = reader.getSectionCount();
+        for (int i = 0; i < sectionCount; i++)
+        {
+            String section = reader.getSection(i);
+            SyncConfig config = new SyncConfig();
+            config.setEmailAddresses(reader.getString(section, EMAIL_ADDRESSES_PROPERTY_NAME, null, true));
+            config.setLogFilePath(reader.getString(section, LOG_FILE_PROPERTY_NAME, DEFAULT_LOG_FILE_NAME, false));
+            if (config.getLogFilePath() != null)
+            {
+                configureFileAppender(config, logger);
+            }
+
+            config.setDataSourceAlias(reader.getString(section, DATA_SOURCE_ALIAS_PROPERTY_NAME, null, true));
+            config.setDataSourceURI(reader.getString(section, DATA_SOURCE_URL_PROPERTY_NAME, null, true));
+            config.setDataSourceOpenbisURL(reader.getString(section, DATA_SOURCE_OPENBIS_URL_PROPERTY_NAME, null, true));
+            config.setDataSourceDSSURL(reader.getString(section, DATA_SOURCE_DSS_URL_PROPERTY_NAME, null, true));
+            String realm = reader.getString(section, DATA_SOURCE_AUTH_REALM_PROPERTY_NAME, null, true);
+            String user = reader.getString(section, DATA_SOURCE_AUTH_USER_PROPERTY_NAME, null, true);
+            String pass = reader.getString(section, DATA_SOURCE_AUTH_PASS_PROPERTY_NAME, null, true);
+            config.setAuthCredentials(realm, user, pass);
+
+            String dsSpaces = reader.getString(section, DATA_SOURCE_SPACES_PROPERTY_NAME, null, false);
+            if (dsSpaces != null)
+            {
+                config.setDataSourceSpaces(dsSpaces);
+            }
+            String hrvSpaces = reader.getString(section, HARVESTER_SPACES_PROPERTY_NAME, null, false);
+            if (hrvSpaces != null)
+            {
+                config.setHarvesterSpaces(hrvSpaces);
+            }
+            if (dsSpaces != null && hrvSpaces != null)
+            {
+                createDataSourceToHarvesterSpaceMappings(config);
+            }
+
+            config.setHarvesterTempDir(reader.getString(section, HARVESTER_TEMP_DIR_PROPERTY_NAME, DEFAULT_HARVESTER_TEMP_DIR, false));
+
+            // Compute the per-section default file names from the constant templates. The
+            // original code mutated a shared field via replaceFirst, so after the first
+            // section "{alias}" was already gone and every later section silently reused
+            // the first section's alias in its default file names.
+            String alias = config.getDataSourceAlias();
+            config.setLastSyncTimestampFileName(
+                    reader.getString(section, HARVESTER_LAST_SYNC_TIMESTAMP_FILE_PROPERTY_NAME,
+                            DEFAULT_LAST_SYNC_TIMESTAMP_FILE_NAME_TEMPLATE.replace("{alias}", alias), false));
+            config.setNotSyncedDataSetsFileName(
+                    reader.getString(section, HARVESTER_NOT_SYNCED_DATA_SETS_FILE_NAME,
+                            DEFAULT_NOT_SYNCED_DATA_SETS_FILE_NAME_TEMPLATE.replace("{alias}", alias), false));
+            configs.add(config);
+        }
+        return configs;
+    }
+
+    /**
+     * Attaches a daily-rolling file appender writing to the configured log file and turns off
+     * additivity so messages are not duplicated to parent appenders.
+     */
+    private void configureFileAppender(SyncConfig config, Logger logger)
+    {
+        DailyRollingFileAppender fileAppender = new DailyRollingFileAppender();
+        fileAppender.setName("bdfile");
+        fileAppender.setLayout(new PatternLayout("%d %-5p [%t] %c - %m%n"));
+        fileAppender.setAppend(false);
+        fileAppender.setFile(config.getLogFilePath());
+        fileAppender.activateOptions();
+        logger.addAppender(fileAppender);
+        logger.setAdditivity(false);
+    }
+
+    /**
+     * Pairs each data source space with the harvester space at the same position and records
+     * the mapping on the config. Fails if the lists differ in size or a harvester space does
+     * not exist on this openBIS instance.
+     */
+    private void createDataSourceToHarvesterSpaceMappings(SyncConfig config)
+    {
+        List<String> dataSourceSpaceList = config.getDataSourceSpaces();
+        List<String> harvesterSpaceList = config.getHarvesterSpaces();
+        if (dataSourceSpaceList.size() != harvesterSpaceList.size())
+        {
+            throw new ConfigurationFailureException("Please specify a harvester space for each data source space.");
+        }
+        for (int i = 0; i < dataSourceSpaceList.size(); i++)
+        {
+            String harvesterSpace = harvesterSpaceList.get(i);
+            Space destSpace = ServiceProvider.getOpenBISService().tryGetSpace(new SpaceIdentifier(harvesterSpace));
+            if (destSpace == null)
+            {
+                throw new ConfigurationFailureException("Space " + harvesterSpace + " does not exist");
+            }
+            config.getSpaceMappings().put(dataSourceSpaceList.get(i), harvesterSpace);
+        }
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DSSFileUtils.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DSSFileUtils.java
new file mode 100644
index 00000000000..4ba6674df8e
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DSSFileUtils.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.io.InputStream;
+import java.util.List;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult;
+import ch.ethz.sis.openbis.generic.dssapi.v3.IDataStoreServerApi;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownloadOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.fetchoptions.DataSetFileFetchOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.id.IDataSetFileId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.search.DataSetFileSearchCriteria;
+import ch.systemsx.cisd.common.spring.HttpInvokerUtils;
+import ch.systemsx.cisd.common.ssl.SslCertificateHelper;
+
+/**
+ * Thin facade over the openBIS V3 AS and DSS APIs for logging in, searching data set files
+ * and downloading their content.
+ *
+ * @author Ganime Betul Akin
+ */
+public class DSSFileUtils
+{
+    public static final int TIMEOUT = 100000;
+
+    private final IDataStoreServerApi dss;
+
+    private final IApplicationServerApi as;
+
+    /** Creates a facade talking to the given AS and DSS V3 API endpoints with the default timeout. */
+    public static DSSFileUtils create(String asUrl, String dssUrl)
+    {
+        return new DSSFileUtils(asUrl, dssUrl, TIMEOUT);
+    }
+
+    private DSSFileUtils(String asUrl, String dssUrl, int timeout)
+    {
+        // Accept any server certificate (e.g. self-signed installations) before creating stubs.
+        SslCertificateHelper.trustAnyCertificate(asUrl);
+        SslCertificateHelper.trustAnyCertificate(dssUrl);
+        this.as = HttpInvokerUtils.createServiceStub(IApplicationServerApi.class, asUrl + IApplicationServerApi.SERVICE_URL, timeout);
+        this.dss = HttpInvokerUtils.createStreamSupportingServiceStub(IDataStoreServerApi.class, dssUrl + IDataStoreServerApi.SERVICE_URL, timeout);
+    }
+
+    /** Authenticates against the application server and returns a session token. */
+    public String login(String loginUser, String loginPass)
+    {
+        return as.login(loginUser, loginPass);
+    }
+
+    /** Searches the DSS for data set files matching the given criteria. */
+    public SearchResult<DataSetFile> searchFiles(String sessionToken, DataSetFileSearchCriteria criteria, DataSetFileFetchOptions dsFileFetchOptions)
+    {
+        return dss.searchFiles(sessionToken, criteria, dsFileFetchOptions);
+    }
+
+    /** Convenience variant of {@link #searchFiles} matching all files of a single data set. */
+    public SearchResult<DataSetFile> searchWithDataSetCode(String sessionToken, String dataSetCode, DataSetFileFetchOptions dsFileFetchOptions)
+    {
+        DataSetFileSearchCriteria codeCriteria = new DataSetFileSearchCriteria();
+        codeCriteria.withDataSet().withCode().thatEquals(dataSetCode);
+        return searchFiles(sessionToken, codeCriteria, dsFileFetchOptions);
+    }
+
+    /** Streams the content of the given files from the DSS. */
+    public InputStream downloadFiles(String sessionToken, List<IDataSetFileId> fileIds, DataSetFileDownloadOptions options)
+    {
+        return dss.downloadFiles(sessionToken, fileIds, options);
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DataSetRegistrationIngestionService.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DataSetRegistrationIngestionService.java
new file mode 100644
index 00000000000..6fa9b718229
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/DataSetRegistrationIngestionService.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownload;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownloadOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownloadReader;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.fetchoptions.DataSetFileFetchOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.id.DataSetFilePermId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.id.IDataSetFileId;
+import ch.systemsx.cisd.common.io.IOUtilities;
+import ch.systemsx.cisd.etlserver.registrator.api.v2.IDataSet;
+import ch.systemsx.cisd.etlserver.registrator.api.v2.IDataSetRegistrationTransactionV2;
+import ch.systemsx.cisd.etlserver.registrator.api.v2.IDataSetUpdatable;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.IngestionService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetProcessingContext;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.internal.v2.IExperimentImmutable;
+import ch.systemsx.cisd.openbis.dss.generic.shared.api.internal.v2.ISampleImmutable;
+import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.TableModel;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewProperty;
+
+/**
+ * Ingestion service that brings one data set from the data source to the harvester: it either
+ * registers a new data set (downloading its files first and verifying CRC32/length) or updates
+ * the metadata of an already existing one.
+ */
+class DataSetRegistrationIngestionService extends IngestionService<DataSetInformation>
+{
+    private static final long serialVersionUID = 1L;
+
+    private List<String> notSyncedDataSetCodes;
+
+    // Incoming description of the data set to register or update.
+    private final NewExternalData dataSet;
+
+    private final String loginUser;
+
+    private final String loginPass;
+
+    private final String asUrl;
+
+    private final String dssUrl;
+
+    private final String harvesterTempDir;
+
+    private final Logger log;
+
+    /**
+     * @param properties must provide "user", "pass", "as-url", "dss-url" and
+     *            "harvester-temp-dir" for connecting to the data source.
+     */
+    public DataSetRegistrationIngestionService(Properties properties, File storeRoot, List<String> notSyncedDataSetCodes, NewExternalData ds,
+            Logger operationLog)
+    {
+        super(properties, storeRoot);
+        this.notSyncedDataSetCodes = notSyncedDataSetCodes;
+        this.dataSet = ds;
+        this.loginUser = properties.getProperty("user");
+        this.loginPass = properties.getProperty("pass");
+        this.asUrl = properties.getProperty("as-url");
+        this.dssUrl = properties.getProperty("dss-url");
+        this.harvesterTempDir = properties.getProperty("harvester-temp-dir");
+        this.log = operationLog;
+    }
+
+    /**
+     * Registers the data set if it does not exist yet (after downloading its files into a
+     * temporary directory under the store root), otherwise updates its metadata in place.
+     * Returns null on success or an error table model when the download fails.
+     */
+    @Override
+    protected TableModel process(IDataSetRegistrationTransactionV2 transaction, Map<String, Object> parameters, DataSetProcessingContext context)
+    {
+        String dataSetCode = dataSet.getCode();
+        ISampleImmutable sample = null;
+        if (dataSet.getSampleIdentifierOrNull() != null)
+        {
+            sample = transaction.getSampleForUpdate(dataSet.getSampleIdentifierOrNull().toString());
+        }
+        IExperimentImmutable experiment = null;
+        if (dataSet.getExperimentIdentifierOrNull() != null)
+        {
+            experiment = transaction.getExperimentForUpdate(dataSet.getExperimentIdentifierOrNull().toString());
+        }
+
+        List<NewProperty> dataSetProperties = dataSet.getDataSetProperties();
+
+        IDataSetUpdatable dataSetForUpdate = transaction.getDataSetForUpdate(dataSetCode);
+        if (dataSetForUpdate == null)
+        {
+            // REGISTER NEW DATA SET after downloading the data set files into a per-data-set
+            // temp directory under the store root.
+            File storeRoot = transaction.getGlobalState().getStoreRootDir();
+            File temp = new File(storeRoot, this.harvesterTempDir);
+            temp.mkdirs();
+            File dir = new File(temp, dataSetCode);
+            dir.mkdirs();
+
+            try
+            {
+                downloadDataSetFiles(dir, dataSetCode);
+            } catch (Exception e)
+            {
+                return errorTableModel(parameters, e);
+            }
+
+            IDataSet ds = transaction.createNewDataSet(dataSet.getDataSetType().getCode(), dataSet.getCode());
+            ds.setSample(sample);
+            ds.setExperiment(experiment);
+            for (NewProperty newProperty : dataSetProperties)
+            {
+                ds.setPropertyValue(newProperty.getPropertyCode(), newProperty.getValue());
+            }
+
+            for (File f : dir.listFiles())
+            {
+                transaction.moveFile(f.getAbsolutePath(), ds);
+            }
+        }
+        else
+        {
+            // UPDATE data set meta data excluding the container/contained relationships.
+            dataSetForUpdate.setSample(sample);
+            dataSetForUpdate.setExperiment(experiment);
+            dataSetForUpdate.setParentDatasets(dataSet.getParentDataSetCodes());
+            for (NewProperty newProperty : dataSetProperties)
+            {
+                dataSetForUpdate.setPropertyValue(newProperty.getPropertyCode(), newProperty.getValue());
+            }
+        }
+        return null;
+    }
+
+    /** Expected CRC32 checksum and length of a file, as reported by the data source. */
+    class FileDetails
+    {
+        final int crc32checksum;
+
+        final long fileLength;
+
+        public FileDetails(int crc32checksum, long fileLength)
+        {
+            this.crc32checksum = crc32checksum;
+            this.fileLength = fileLength;
+        }
+
+        public int getCrc32checksum()
+        {
+            return crc32checksum;
+        }
+
+        public long getFileLength()
+        {
+            return fileLength;
+        }
+    }
+
+    /**
+     * Downloads all files of the given data set from the data source DSS into <code>dir</code>,
+     * verifying each file's CRC32 checksum and length against the values reported by the search.
+     *
+     * @throws RuntimeException if a downloaded file's checksum or length does not match.
+     */
+    private void downloadDataSetFiles(File dir, String dataSetCode) throws Exception
+    {
+        DSSFileUtils dssFileUtils = DSSFileUtils.create(asUrl, dssUrl);
+        String sessionToken = dssFileUtils.login(loginUser, loginPass);
+        DataSetFileFetchOptions dsFileFetchOptions = new DataSetFileFetchOptions();
+        SearchResult<DataSetFile> result = dssFileUtils.searchWithDataSetCode(sessionToken, dataSetCode, dsFileFetchOptions);
+        List<DataSetFile> files = result.getObjects();
+
+        // Remember the expected checksum/length of every file so the download can be verified.
+        List<IDataSetFileId> fileIds = new LinkedList<IDataSetFileId>();
+        Map<DataSetFilePermId, FileDetails> fileDetailsMap = new HashMap<DataSetFilePermId, FileDetails>();
+        for (DataSetFile f : files)
+        {
+            fileIds.add(f.getPermId());
+            fileDetailsMap.put(f.getPermId(), new FileDetails(f.getChecksumCRC32(), f.getFileLength()));
+        }
+        // Download the files and write them below dir, mirroring the original paths.
+        DataSetFileDownloadOptions options = new DataSetFileDownloadOptions();
+        options.setRecursive(false);
+        InputStream stream = dssFileUtils.downloadFiles(sessionToken, fileIds, options);
+        DataSetFileDownloadReader reader = new DataSetFileDownloadReader(stream);
+        DataSetFileDownload fileDownload = null;
+        while ((fileDownload = reader.read()) != null)
+        {
+            DataSetFile orgFile = fileDownload.getDataSetFile();
+            if (orgFile.getPath().equals(""))
+                continue;
+            String filePath = orgFile.getPath();
+            File output = new File(dir.getAbsolutePath(), filePath);
+            if (orgFile.isDirectory())
+            {
+                output.mkdirs();
+            }
+            else
+            {
+                DataSetFilePermId filePermId = orgFile.getPermId();
+                FileDetails fileDetails = fileDetailsMap.get(filePermId);
+
+                Path path = Paths.get(dir.getAbsolutePath(), filePath);
+                InputStream inputStream = fileDownload.getInputStream();
+                int checksumCRC32;
+                // Close the output stream when done (the original code leaked it), so the file
+                // is fully flushed before its length is checked below.
+                try (OutputStream outputStream = Files.newOutputStream(path))
+                {
+                    checksumCRC32 = IOUtilities.copyAndGetChecksumCRC32(inputStream, outputStream);
+                }
+                File copiedFile = new File(path.normalize().toString());
+                if (checksumCRC32 != fileDetails.getCrc32checksum()
+                        || copiedFile.length() != fileDetails.getFileLength())
+                {
+                    throw new RuntimeException("Crc32 or file length does not match for  " + orgFile.getPath() + " calculated:" + checksumCRC32
+                            + " expected:"
+                            + fileDetails.getCrc32checksum());
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
new file mode 100644
index 00000000000..6fabf021752
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/EntitySynchronizer.java
@@ -0,0 +1,1102 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
+import org.w3c.dom.Document;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.IApplicationServerApi;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.DataSetKind;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.delete.DataSetDeletionOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.id.DataSetPermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.deletion.id.IDeletionId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.delete.ExperimentDeletionOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id.ExperimentPermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.material.delete.MaterialDeletionOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.material.id.MaterialPermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.project.delete.ProjectDeletionOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.project.id.ProjectPermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.delete.SampleDeletionOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id.SamplePermId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.IDataStoreServerApi;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.fetchoptions.DataSetFileFetchOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.search.DataSetFileSearchCriteria;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.EntityRetriever;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.common.ServiceFinderUtils;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.SyncConfig;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.Connection;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.DataSetWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.ExperimentWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.MaterialWithLastModificationDate;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.ProjectWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.SampleWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.datasourceconnector.DataSourceConnector;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.datasourceconnector.IDataSourceConnector;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator.INameTranslator;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator.PrefixBasedNameTranslator;
+import ch.ethz.sis.openbis.generic.shared.entitygraph.EntityGraph;
+import ch.ethz.sis.openbis.generic.shared.entitygraph.Node;
+import ch.systemsx.cisd.common.concurrent.ITaskExecutor;
+import ch.systemsx.cisd.common.concurrent.ParallelizedExecutor;
+import ch.systemsx.cisd.common.exceptions.Status;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.logging.Log4jSimpleLogger;
+import ch.systemsx.cisd.etlserver.registrator.api.v1.impl.ConversionUtils;
+import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetDirectoryProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.DataSetProcessingContext;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IConfigProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataSetDirectoryProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IShareIdManager;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.utils.SegmentedStoreUtils;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.EncapsulatedCommonServer;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.MasterDataRegistrationException;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.MasterDataRegistrationTransaction;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.impl.MasterDataTransactionErrors;
+import ch.systemsx.cisd.openbis.generic.shared.basic.TechId;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Experiment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IEntityProperty;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.ListSampleCriteria;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Material;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.MaterialIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewAttachment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewExperiment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewMaterialWithType;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewProject;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewSample;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewSpace;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.PhysicalDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Project;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Space;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.TableModel;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.TableModelColumnHeader;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.PropertyBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.dto.AtomicEntityOperationResult;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DataSetBatchUpdatesDTO;
+import ch.systemsx.cisd.openbis.generic.shared.dto.ExperimentUpdatesDTO;
+import ch.systemsx.cisd.openbis.generic.shared.dto.MaterialUpdateDTO;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewContainerDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewProperty;
+import ch.systemsx.cisd.openbis.generic.shared.dto.ProjectUpdatesDTO;
+import ch.systemsx.cisd.openbis.generic.shared.dto.SampleUpdatesDTO;
+import ch.systemsx.cisd.openbis.generic.shared.dto.builders.AtomicEntityOperationDetailsBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ProjectIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ProjectIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier;
+
+/**
+ * @author Ganime Betul Akin
+ */
+public class EntitySynchronizer
+{
+    private final String dataStoreCode;
+
+    private final File storeRoot;
+
+    private final IEncapsulatedOpenBISService service;
+
+    private final DataSetProcessingContext context;
+
+    private final Date lastSyncTimestamp;
+
+    private final Set<String> dataSetsCodesToRetry;
+
+    private final SyncConfig config;
+
+    private final Logger operationLog;
+
+    private final Set<String> blackListedDataSetCodes;
+
+    private final MasterDataRegistrationTransaction masterDataRegistrationTransaction;
+
+    public EntitySynchronizer(IEncapsulatedOpenBISService service, String dataStoreCode, File storeRoot, Date lastSyncTimestamp,
+            Set<String> dataSetsCodesToRetry, Set<String> blackListedDataSetCodes, DataSetProcessingContext context,
+            SyncConfig config, Logger operationLog)
+    {
+        this.service = service;
+        this.dataStoreCode = dataStoreCode;
+        this.storeRoot = storeRoot;
+        this.lastSyncTimestamp = lastSyncTimestamp;
+        this.dataSetsCodesToRetry = dataSetsCodesToRetry;
+        this.blackListedDataSetCodes = blackListedDataSetCodes;
+        this.context = context;
+        this.config = config;
+        this.operationLog = operationLog;
+        this.masterDataRegistrationTransaction = getMasterDataRegistrationTransaction();
+    }
+
+    public Date syncronizeEntities() throws Exception
+    {
+        DataSourceConnector dataSourceConnector = new DataSourceConnector(config.getDataSourceURI(), config.getAuthenticationCredentials());
+        return syncronizeEntities(dataSourceConnector);
+    }
+
+    public Date syncronizeEntities(IDataSourceConnector dataSourceConnector) throws Exception
+    {
+        // retrieve the document from the data source
+        operationLog.info("Retrieving the resource list..");
+        Document doc = dataSourceConnector.getResourceListAsXMLDoc(Arrays.asList(ArrayUtils.EMPTY_STRING_ARRAY));
+
+        // Parse the resource list: This sends back all projects,
+        // experiments, samples and data sets contained in the XML together with their last modification date to be used for filtering
+        operationLog.info("parsing the resource list xml document");
+        String dataSourcePrefix = config.getDataSourceAlias();
+        INameTranslator nameTranslator = null;
+        if (dataSourcePrefix != null && dataSourcePrefix.trim().equals("") == false)
+        {
+            nameTranslator = new PrefixBasedNameTranslator(dataSourcePrefix);
+        }
+
+        ResourceListParser parser = ResourceListParser.create(nameTranslator, dataStoreCode, masterDataRegistrationTransaction); // ,
+                                                                                                                                 // lastSyncTimestamp
+        ResourceListParserData data = parser.parseResourceListDocument(doc);
+
+        processDeletions(data);
+
+        operationLog.info("registering master data");
+        // registerMasterData();
+
+        AtomicEntityOperationDetailsBuilder builder = new AtomicEntityOperationDetailsBuilder();
+
+        for (String spaceCode : data.getHarvesterSpaceList())
+        {
+            Space space = service.tryGetSpace(new SpaceIdentifier(spaceCode));
+            if (space == null)
+            {
+                builder.space(new NewSpace(spaceCode, "Synchronized from: " + config.getDataSourceURI(), null));
+            }
+        }
+
+        processMetaData(data, builder);
+
+        operationLog.info("Registering meta data...");
+        AtomicEntityOperationResult operationResult = service.performEntityOperations(builder.getDetails());
+        operationLog.info("entity operation result: " + operationResult);
+
+        // register physical data sets without any hierarchy
+        // Note that container/component and parent/child relationships are established post-reg.
+        // setParentDataSetsOnTheChildren(data);
+        Map<String, DataSetWithConnections> physicalDSMap =
+                data.filterPhysicalDataSetsByLastModificationDate(lastSyncTimestamp, dataSetsCodesToRetry);
+        operationLog.info("Registering data sets...");
+        List<String> notRegisteredDataSets = registerPhysicalDataSets(physicalDSMap);
+        operationLog.info((physicalDSMap.keySet().size() - notRegisteredDataSets.size()) + " data set(s) have been successfully registered. "
+                + notRegisteredDataSets.size()
+                + " data set(s) FAILED to register ");
+
+        // link physical data sets registered above to container data sets
+        // and set parent/child relationships
+        operationLog.info("start linking/un-linking container and component data sets");
+        establishDataSetRelationships(data.getDataSetsToProcess(), notRegisteredDataSets, physicalDSMap);
+
+        return data.getResourceListTimestamp();
+    }
+
+    private void establishDataSetRelationships(Map<String, DataSetWithConnections> dataSetsToProcess,
+            List<String> notRegisteredDataSets, Map<String, DataSetWithConnections> physicalDSMap)
+    {
+        // set parent and container data set codes before everything else
+        // container and physical data sets can both be parents/children of each other
+        AtomicEntityOperationDetailsBuilder builder = new AtomicEntityOperationDetailsBuilder();
+        Map<String, NewExternalData> datasetsToUpdate = new HashMap<String, NewExternalData>();
+        Map<String, Set<String>> dsToParents = new HashMap<String, Set<String>>();
+        Map<String, Set<String>> dsToContained = new HashMap<String, Set<String>>();
+        for (DataSetWithConnections dsWithConn : dataSetsToProcess.values())
+        {
+            for (Connection conn : dsWithConn.getConnections())
+            {
+                NewExternalData dataSet = dsWithConn.getDataSet();
+                if (dataSetsToProcess.containsKey(conn.getToPermId()) && conn.getType().equals("Child"))
+                {
+                    if (notRegisteredDataSets.contains(dataSet.getCode()) == false)
+                    {
+                        NewExternalData childDataSet = dataSetsToProcess.get(conn.getToPermId()).getDataSet();
+                        List<String> parentDataSetCodes = childDataSet.getParentDataSetCodes();
+                        parentDataSetCodes.add(dataSet.getCode());
+                        dsToParents.put(childDataSet.getCode(), new HashSet<String>(parentDataSetCodes));
+                    }
+                }
+                else if (dataSetsToProcess.containsKey(conn.getToPermId()) && conn.getType().equals("Component"))
+                {
+                    NewExternalData componentDataSet = dataSetsToProcess.get(conn.getToPermId()).getDataSet();
+                    if (notRegisteredDataSets.contains(componentDataSet.getCode()) == false)
+                    {
+                        NewContainerDataSet containerDataSet = (NewContainerDataSet) dataSet;
+                        List<String> containedDataSetCodes = containerDataSet.getContainedDataSetCodes();
+                        containedDataSetCodes.add(componentDataSet.getCode());
+                        dsToContained.put(dataSet.getCode(), new HashSet<String>(containedDataSetCodes));
+                    }
+                }
+            }
+        }
+        // go through all the data sets, decide what needs to be updated
+        for (DataSetWithConnections dsWithConn : dataSetsToProcess.values())
+        {
+            NewExternalData dataSet = (NewExternalData) dsWithConn.getDataSet();
+
+            if (dsWithConn.getLastModificationDate().after(lastSyncTimestamp)
+                    || dataSetsCodesToRetry.contains(dataSet.getCode()) == true
+                    || isParentModified(dsToParents, dataSet))
+            {
+                if (physicalDSMap.containsKey(dataSet.getCode()) == false && service.tryGetDataSet(dataSet.getCode()) == null)
+                {
+                    builder.dataSet(dataSet);
+                }
+                else
+                {
+                    datasetsToUpdate.put(dataSet.getCode(), dataSet);
+                }
+            }
+        }
+
+        // go thru to-be-updated DS list and establish/break relations
+        for (NewExternalData dataSet : datasetsToUpdate.values())
+        {
+            // if the DS could not have been registered for some reason,
+            // skip this.
+            AbstractExternalData dsInHarvester = service.tryGetDataSet(dataSet.getCode());
+            if (dsInHarvester == null)
+            {
+                continue;
+            }
+            DataSetBatchUpdatesDTO dsBatchUpdatesDTO = createDataSetBatchUpdateDTO(dataSet, dsInHarvester);
+            if (dataSet instanceof NewContainerDataSet)
+            {
+                NewContainerDataSet containerDS = (NewContainerDataSet) dataSet;
+                if (dsToContained.containsKey(containerDS.getCode()))
+                {
+                    dsBatchUpdatesDTO.setModifiedContainedDatasetCodesOrNull(dsToContained.get(dataSet.getCode()).toArray(new
+                            String[containerDS.getContainedDataSetCodes().size()]));
+                }
+                else
+                {
+                    dsBatchUpdatesDTO.setModifiedContainedDatasetCodesOrNull(new String[0]);
+                }
+                dsBatchUpdatesDTO.getDetails().setContainerUpdateRequested(true);
+            }
+            if (dsToParents.containsKey(dataSet.getCode()))
+            {
+                dsBatchUpdatesDTO.setModifiedParentDatasetCodesOrNull(dsToParents.get(dataSet.getCode()).toArray(
+                        new String[dataSet.getParentDataSetCodes().size()]));
+                // TODO should this always be true or should we flag the ones that require parent update. Same for container
+            }
+            else
+            {
+                dsBatchUpdatesDTO.setModifiedParentDatasetCodesOrNull(new String[0]);
+            }
+            dsBatchUpdatesDTO.getDetails().setParentsUpdateRequested(true);
+            SampleIdentifier sampleIdentifier = dataSet.getSampleIdentifierOrNull();
+            if (sampleIdentifier != null)
+            {
+                Sample sampleWithExperiment = service.tryGetSampleWithExperiment(sampleIdentifier);
+                dsBatchUpdatesDTO.setSampleIdentifierOrNull(SampleIdentifierFactory.parse(sampleWithExperiment.getIdentifier()));
+                dsBatchUpdatesDTO.getDetails().setSampleUpdateRequested(true);
+            }
+            else
+            {
+                dsBatchUpdatesDTO.setSampleIdentifierOrNull(null);
+                dsBatchUpdatesDTO.getDetails().setSampleUpdateRequested(true);
+            }
+
+            ExperimentIdentifier expIdentifier = dataSet.getExperimentIdentifierOrNull();
+            if (expIdentifier != null)
+            {
+                Experiment experiment = service.tryGetExperiment(expIdentifier);
+                dsBatchUpdatesDTO.setExperimentIdentifierOrNull(ExperimentIdentifierFactory.parse(experiment.getIdentifier()));
+                dsBatchUpdatesDTO.getDetails().setExperimentUpdateRequested(true);
+            }
+            else
+            {
+                dsBatchUpdatesDTO.setExperimentIdentifierOrNull(null);
+                dsBatchUpdatesDTO.getDetails().setExperimentUpdateRequested(true);
+            }
+            builder.dataSetUpdate(dsBatchUpdatesDTO);
+        }
+        AtomicEntityOperationResult operationResult = service.performEntityOperations(builder.getDetails());
+        operationLog.info("entity operation result: " + operationResult);
+    }
+
+    private boolean isParentModified(Map<String, Set<String>> dsToParents, NewExternalData dataSet)
+    {
+        Set<String> parents = dsToParents.get(dataSet.getCode());
+        if (parents == null)
+        {
+            return false;
+        }
+        for (String parentDSCode : parents)
+        {
+            if (dataSetsCodesToRetry.contains(parentDSCode))
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private List<String> registerPhysicalDataSets(Map<String, DataSetWithConnections> physicalDSMap) throws IOException
+    {
+        List<DataSetWithConnections> dsList = new ArrayList<DataSetWithConnections>(physicalDSMap.values());
+        List<String> notRegisteredDataSetCodes = Collections.synchronizedList(new ArrayList<String>());
+
+        // This parallelization is possible because each DS is registered without dependencies
+        // and the dependencies are established later on in the sync process.
+        ParallelizedExecutor.process(dsList, new DataSetRegistrationTaskExecutor(notRegisteredDataSetCodes), 0.5, 10, "register data sets", 0, false);
+
+        // backup the current not synced data set codes file, delete the original file
+        saveNotSyncedDataSetsFile(notRegisteredDataSetCodes);
+
+        return notRegisteredDataSetCodes;
+    }
+
+    private void saveNotSyncedDataSetsFile(List<String> notRegisteredDataSetCodes) throws IOException
+    {
+        File notSyncedDataSetsFile = new File(config.getNotSyncedDataSetsFileName());
+        if (notSyncedDataSetsFile.exists())
+        {
+            backupAndResetNotSyncedDataSetsFile(notSyncedDataSetsFile);
+        }
+
+        for (String dsCode : notRegisteredDataSetCodes)
+        {
+            FileUtilities.appendToFile(notSyncedDataSetsFile, dsCode, true);
+        }
+        // append the blacklisted codes to the end of the file
+        for (String dsCode : blackListedDataSetCodes)
+        {
+            FileUtilities.appendToFile(notSyncedDataSetsFile, dsCode, true);
+        }
+    }
+
+    private void backupAndResetNotSyncedDataSetsFile(File notSyncedDataSetsFile) throws IOException
+    {
+        File backupLastSyncTimeStampFile = new File(config.getNotSyncedDataSetsFileName() + ".bk");
+        FileUtils.copyFile(notSyncedDataSetsFile, backupLastSyncTimeStampFile);
+        FileUtils.writeStringToFile(notSyncedDataSetsFile, "");
+    }
+
+    private void processMetaData(ResourceListParserData data, AtomicEntityOperationDetailsBuilder builder)
+    {
+        processProjects(data, builder);
+
+        processExperiments(data, builder);
+
+        processSamples(data, builder);
+
+        processMaterials(data, builder);
+    }
+
+    private void registerMasterData()
+    {
+        masterDataRegistrationTransaction.commit();
+        MasterDataTransactionErrors transactionErrors = masterDataRegistrationTransaction.getTransactionErrors();
+        if (false == transactionErrors.getErrors().isEmpty())
+        {
+            MasterDataRegistrationException masterDataRegistrationException =
+                    new MasterDataRegistrationException("Master data synchronization finished with errors:",
+                            Collections
+                                    .<MasterDataTransactionErrors> singletonList(transactionErrors));
+            operationLog.info("Master data synchronizatio finished with errors");
+            masterDataRegistrationException.logErrors(new Log4jSimpleLogger(operationLog));
+        }
+    }
+
+    private MasterDataRegistrationTransaction getMasterDataRegistrationTransaction()
+    {
+        String openBisServerUrl = ServiceProvider.getConfigProvider().getOpenBisServerUrl();
+        String sessionToken = ServiceProvider.getOpenBISService().getSessionToken();
+        EncapsulatedCommonServer encapsulatedCommonServer = ServiceFinderUtils.getEncapsulatedCommonServer(sessionToken, openBisServerUrl);
+        return new MasterDataRegistrationTransaction(encapsulatedCommonServer);
+    }
+
    /**
     * Compares the incoming resource list against the entities currently present in the
     * harvester spaces and deletes everything that is no longer present on the data source, or
     * whose type code has changed. Data set, sample and experiment deletions go to trash and
     * are confirmed at the end; finally the store folders of physical data sets that were
     * scheduled for re-registration are removed from disk.
     *
     * @param data the parsed resource list (incoming entities keyed by perm id / code)
     */
    private void processDeletions(ResourceListParserData data) throws NoSuchAlgorithmException, UnsupportedEncodingException
    {
        operationLog.info("Processing deletions");
        String sessionToken = ServiceProvider.getOpenBISService().getSessionToken();
        EntityRetriever entityRetriever =
                EntityRetriever.createWithSessionToken(ServiceProvider.getV3ApplicationService(), sessionToken);

        // perm ids / codes of everything contained in the incoming resource list
        Set<String> incomingProjectPermIds = data.getProjectsToProcess().keySet();
        Set<String> incomingExperimentPermIds = data.getExperimentsToProcess().keySet();
        Set<String> incomingSamplePermIds = data.getSamplesToProcess().keySet();
        Set<String> incomingDataSetCodes = data.getDataSetsToProcess().keySet();
        Set<String> incomingMaterialCodes = data.getMaterialsToProcess().keySet();

        // find projects, experiments, samples and data sets to be deleted
        List<ProjectPermId> projectPermIds = new ArrayList<ProjectPermId>();
        List<ExperimentPermId> experimentPermIds = new ArrayList<ExperimentPermId>();
        List<SamplePermId> samplePermIds = new ArrayList<SamplePermId>();
        List<DataSetPermId> dsPermIds = new ArrayList<DataSetPermId>();
        List<MaterialPermId> matPermIds = new ArrayList<MaterialPermId>();

        // physical data sets whose content changed; their store folders are wiped at the end
        Set<PhysicalDataSet> physicalDataSetsDelete = new HashSet<PhysicalDataSet>();
        // first find out the entities to be deleted: walk the entity graph of every harvester
        // space and compare each node against the incoming resource list
        for (String harvesterSpaceId : data.getHarvesterSpaceList())
        {
            EntityGraph<Node<?>> harvesterEntityGraph = entityRetriever.getEntityGraph(harvesterSpaceId);
            List<Node<?>> entities = harvesterEntityGraph.getNodes();
            for (Node<?> entity : entities)
            {
                if (entity.getEntityKind().equals("PROJECT"))
                {
                    // delete projects that are no longer in the incoming list
                    if (incomingProjectPermIds.contains(entity.getPermId()) == false)
                    {
                        projectPermIds.add(new ProjectPermId(entity.getPermId()));
                    }
                }
                else if (entity.getEntityKind().equals("EXPERIMENT"))
                {
                    if (incomingExperimentPermIds.contains(entity.getPermId()) == false)
                    {
                        experimentPermIds.add(new ExperimentPermId(entity.getPermId()));
                    }
                    else
                    {
                        // experiment still exists but its type changed: delete it so it can be re-created
                        // NOTE(review): typeCodeOrNull may be null (as its name suggests); the
                        // equals() call below would then throw an NPE - confirm it is always set here.
                        String typeCodeOrNull = entity.getTypeCodeOrNull();
                        NewExperiment exp = data.getExperimentsToProcess().get(entity.getPermId()).getExperiment();
                        if (typeCodeOrNull.equals(exp.getExperimentTypeCode()) == false)
                        {
                            experimentPermIds.add(new ExperimentPermId(entity.getPermId()));
                        }
                    }
                }
                else if (entity.getEntityKind().equals("SAMPLE"))
                {
                    if (incomingSamplePermIds.contains(entity.getPermId()) == false)
                    {
                        samplePermIds.add(new SamplePermId(entity.getPermId()));
                    }
                    else
                    {
                        // sample still exists but its type changed: delete it so it can be re-created
                        String typeCodeOrNull = entity.getTypeCodeOrNull();
                        NewSample smp = data.getSamplesToProcess().get(entity.getPermId()).getSample();
                        if (typeCodeOrNull.equals(smp.getSampleType().getCode()) == false)
                        {
                            samplePermIds.add(new SamplePermId(entity.getPermId()));
                        }
                    }
                }
                else if (entity.getEntityKind().equals("DATA_SET"))
                {
                    if (incomingDataSetCodes.contains(entity.getPermId()) == false)
                    {
                        dsPermIds.add(new DataSetPermId(entity.getPermId()));
                    }
                    else
                    {
                        // data set still exists: delete it only if its type changed, or if it is
                        // a physical data set that was modified since the last sync and whose
                        // content no longer matches (deep comparison)
                        boolean sameDS = true;
                        // if (ds.getKind() == DataSetKind.PHYSICAL && ds.lastModificationDate.after(lastSyncDate))
                        String typeCodeOrNull = entity.getTypeCodeOrNull();

                        DataSetWithConnections dsWithConns = data.getDataSetsToProcess().get(entity.getPermId());
                        NewExternalData ds = dsWithConns.getDataSet();
                        if (typeCodeOrNull.equals(ds.getDataSetType().getCode()) == false)
                        {
                            sameDS = false;
                        }
                        else
                        {
                            if (dsWithConns.getKind() == DataSetKind.PHYSICAL && dsWithConns.getLastModificationDate().after(lastSyncTimestamp))
                            {
                                PhysicalDataSet physicalDS = service.tryGetDataSet(entity.getPermId()).tryGetAsDataSet();
                                sameDS = deepCompareDataSets(entity.getPermId());
                                if (sameDS == false)
                                    physicalDataSetsDelete.add(physicalDS);
                            }
                        }
                        if (sameDS == false)
                        {
                            dsPermIds.add(new DataSetPermId(entity.getPermId()));
                        }
                    }
                }
            }
        }

        // materials are global (not space-scoped), so they are compared separately
        List<ch.ethz.sis.openbis.generic.asapi.v3.dto.material.Material> materials = entityRetriever.fetchMaterials();

        for (ch.ethz.sis.openbis.generic.asapi.v3.dto.material.Material material : materials)
        {
            if (incomingMaterialCodes.contains(material.getCode()) == false)
            {
                matPermIds.add(new MaterialPermId(material.getCode(), material.getType().getCode()));
            }
        }

        IApplicationServerApi v3Api = ServiceProvider.getV3ApplicationService();

        // delete data sets
        DataSetDeletionOptions dsDeletionOpts = new DataSetDeletionOptions();
        dsDeletionOpts.setReason("sync data set deletions"); // TODO maybe mention data source space id in the reason

        IDeletionId dsDeletionId =
                v3Api.deleteDataSets(sessionToken, dsPermIds, dsDeletionOpts);

        // delete samples
        SampleDeletionOptions sampleDeletionOptions = new SampleDeletionOptions();
        sampleDeletionOptions.setReason("sync sample deletions");
        IDeletionId smpDeletionId = v3Api.deleteSamples(sessionToken, samplePermIds, sampleDeletionOptions);

        // delete experiments
        ExperimentDeletionOptions expDeletionOpts = new ExperimentDeletionOptions();
        expDeletionOpts.setReason("sync experiment deletions");
        IDeletionId expDeletionId = v3Api.deleteExperiments(sessionToken, experimentPermIds, expDeletionOpts);

        // delete projects
        // NOTE(review): project and material deletions are not added to the confirmDeletions
        // call below - presumably these deletions do not go to trash; verify against the V3 API.
        ProjectDeletionOptions prjDeletionOpts = new ProjectDeletionOptions();
        prjDeletionOpts.setReason("Sync projects");
        v3Api.deleteProjects(sessionToken, projectPermIds, prjDeletionOpts);

        // delete materials
        MaterialDeletionOptions matDeletionOptions = new MaterialDeletionOptions();
        matDeletionOptions.setReason("sync materials");
        v3Api.deleteMaterials(sessionToken, matPermIds, matDeletionOptions);

        // confirm the (trash) deletions of experiments, samples and data sets
        ArrayList<IDeletionId> deletionIds = new ArrayList<IDeletionId>();

        // build a one-line summary of what was deleted for the operation log
        StringBuffer summary = new StringBuffer();
        if (projectPermIds.size() > 0)
        {
            summary.append(projectPermIds.size() + " projects,");
        }
        if (matPermIds.size() > 0)
        {
            summary.append(matPermIds.size() + " materials,");
        }
        if (expDeletionId != null)
        {
            deletionIds.add(expDeletionId);
            summary.append(experimentPermIds.size() + " experiments,");
        }
        if (smpDeletionId != null)
        {
            deletionIds.add(smpDeletionId);
            summary.append(samplePermIds.size() + " samples,");
        }
        if (dsDeletionId != null)
        {
            deletionIds.add(dsDeletionId);
            summary.append(dsPermIds.size() + " data sets");
        }
        v3Api.confirmDeletions(sessionToken, deletionIds); // Arrays.asList(expDeletionId, dsDeletionId, smpDeletionId)

        if (summary.length() > 0)
        {
            // strip the trailing comma from the summary
            operationLog.info(summary.substring(0, summary.length() - 1) + " have been deleted:");
        }
        // finally remove the store folders of physical data sets whose content changed
        for (PhysicalDataSet physicalDS : physicalDataSetsDelete)
        {
            operationLog.info("Is going to delete the location: " + physicalDS.getLocation());
            File datasetDir =
                    getDirectoryProvider().getDataSetDirectory(physicalDS);
            SegmentedStoreUtils.deleteDataSetInstantly(physicalDS.getCode(), datasetDir, new Log4jSimpleLogger(operationLog));
        }
    }
+
+    /**
+     * Adds new experiments to the builder or creates update DTOs for experiments already
+     * known to the harvester, then wires incoming samples/data sets to their experiments.
+     */
+    private void processExperiments(ResourceListParserData data,
+            AtomicEntityOperationDetailsBuilder builder)
+    {
+        // process experiments
+        Map<String, ExperimentWithConnections> experimentsToProcess = data.getExperimentsToProcess();
+        for (ExperimentWithConnections exp : experimentsToProcess.values())
+        {
+            NewExperiment newIncomingExp = exp.getExperiment();
+            // Only experiments modified on the data source since the last sync need add/update.
+            if (exp.getLastModificationDate().after(lastSyncTimestamp))
+            {
+                Experiment experiment = null;
+                try
+                {
+                    experiment = service.tryGetExperimentByPermId(newIncomingExp.getPermID());
+                } catch (Exception e)
+                {
+                    // doing nothing because when the experiment with the perm id is not found
+                    // an exception will be thrown. Seems to be the same with entity kinds
+                }
+
+                if (experiment == null)
+                {
+                    // ADD EXPERIMENT
+                    builder.experiment(newIncomingExp);
+                }
+                else
+                {
+                    // UPDATE EXPERIMENT
+                    ExperimentUpdatesDTO expUpdate = createExperimentUpdateDTOs(newIncomingExp, experiment);
+                    builder.experimentUpdate(expUpdate);
+                }
+            }
+            // Connections are wired even for experiments that were not modified themselves.
+            handleExperimentConnections(data, exp, newIncomingExp);
+        }
+    }
+
+    /**
+     * Wires incoming samples and data sets to the experiment they are connected to by
+     * setting the experiment identifier on the connected entities.
+     */
+    private void handleExperimentConnections(ResourceListParserData data, ExperimentWithConnections exp, NewExperiment newIncomingExp)
+    {
+        Map<String, SampleWithConnections> incomingSamples = data.getSamplesToProcess();
+        Map<String, DataSetWithConnections> incomingDataSets = data.getDataSetsToProcess();
+        for (Connection connection : exp.getConnections())
+        {
+            String toPermId = connection.getToPermId();
+            SampleWithConnections connectedSample = incomingSamples.get(toPermId);
+            if (connectedSample != null)
+            {
+                connectedSample.getSample().setExperimentIdentifier(newIncomingExp.getIdentifier());
+            }
+            DataSetWithConnections connectedDataSet = incomingDataSets.get(toPermId);
+            if (connectedDataSet != null)
+            {
+                connectedDataSet.getDataSet().setExperimentIdentifierOrNull(ExperimentIdentifierFactory.parse(newIncomingExp.getIdentifier()));
+            }
+        }
+    }
+
+    /**
+     * Builds the update DTO for an experiment that already exists on the harvester side,
+     * copying the incoming identifier and properties.
+     */
+    private ExperimentUpdatesDTO createExperimentUpdateDTOs(NewExperiment newIncomingExp, Experiment experiment)
+    {
+        ExperimentUpdatesDTO update = new ExperimentUpdatesDTO();
+        update.setExperimentId(TechId.create(experiment));
+        update.setVersion(experiment.getVersion());
+        update.setProjectIdentifier(ExperimentIdentifierFactory.parse(newIncomingExp.getIdentifier()));
+        update.setProperties(Arrays.asList(newIncomingExp.getProperties()));
+        // TODO attachments are not synchronized yet
+        update.setAttachments(Collections.<NewAttachment> emptyList());
+        return update;
+    }
+
+    /**
+     * Adds new materials to the builder or creates update DTOs for materials that already
+     * exist on the harvester.
+     */
+    private void processMaterials(ResourceListParserData data, AtomicEntityOperationDetailsBuilder builder)
+    {
+        // process materials
+        Map<String, MaterialWithLastModificationDate> materialsToProcess = data.getMaterialsToProcess();
+        for (MaterialWithLastModificationDate newMaterialWithType : materialsToProcess.values())
+        {
+            NewMaterialWithType newIncomingMaterial = newMaterialWithType.getMaterial();
+            // Only materials modified on the data source since the last sync need add/update.
+            if (newMaterialWithType.getLastModificationDate().after(lastSyncTimestamp))
+            {
+                // Materials are identified by (code, type) rather than perm id.
+                Material material = service.tryGetMaterial(new MaterialIdentifier(newIncomingMaterial.getCode(), newIncomingMaterial.getType()));
+                if (material == null)
+                {
+                    builder.material(newIncomingMaterial);
+                }
+                else
+                {
+                    MaterialUpdateDTO update =
+                            new MaterialUpdateDTO(TechId.create(material), Arrays.asList(newIncomingMaterial.getProperties()),
+                                    material.getModificationDate());
+                    builder.materialUpdate(update);
+                }
+            }
+        }
+    }
+
+    /**
+     * Adds new projects to the builder or creates update DTOs for projects already known to
+     * the harvester.
+     */
+    private void processProjects(ResourceListParserData data, AtomicEntityOperationDetailsBuilder builder)
+    {
+        Map<String, ProjectWithConnections> projectsToProcess = data.getProjectsToProcess();
+        for (ProjectWithConnections prj : projectsToProcess.values())
+        {
+            NewProject incomingProject = prj.getProject();
+            // Only projects modified on the data source since the last sync need add/update.
+            if (prj.getLastModificationDate().after(lastSyncTimestamp))
+            {
+                Project project = null;
+                try
+                {
+                    project = service.tryGetProjectByPermId(incomingProject.getPermID());
+                } catch (Exception e)
+                {
+                    // TODO doing nothing because when the project with the perm is not found
+                    // an exception will be thrown. See bug report SSDM-4108
+                }
+
+                if (project == null)
+                {
+                    // ADD PROJECT
+                    builder.project(incomingProject);
+                }
+                else
+                {
+                    // UPDATE PROJECT
+                    builder.projectUpdate(createProjectUpdateDTO(incomingProject, project));
+                }
+            }
+            // NOTE(review): project-to-experiment connection handling is disabled here.
+            // handleProjectConnections(data, prj);
+        }
+    }
+
+    /**
+     * Handles the experiment connections of a project: when the local graph does not contain
+     * the incoming project/experiment edge, the incoming experiment identifier is re-rooted
+     * under the connected project.
+     * NOTE(review): the only call site of this method is currently commented out in
+     * processProjects.
+     */
+    private void handleProjectConnections(ResourceListParserData data, ProjectWithConnections prj)
+    {
+        Map<String, ExperimentWithConnections> experimentsToProcess = data.getExperimentsToProcess();
+        for (Connection conn : prj.getConnections())
+        {
+            String connectedExpPermId = conn.getToPermId();
+            // TODO we need to do the same check for samples to support project samples
+            if (experimentsToProcess.containsKey(connectedExpPermId))
+            {
+                // the project is connected to an experiment
+                ExperimentWithConnections exp = experimentsToProcess.get(connectedExpPermId);
+                NewExperiment newExp = exp.getExperiment();
+                // (Removed an unused tryGetExperimentByPermId lookup whose result was never read.)
+                // check if our local graph has the same connection
+                if (service.tryGetExperiment(ExperimentIdentifierFactory.parse(newExp.getIdentifier())) == null)
+                {
+                    // add new edge: keep the experiment code but move it under the incoming project
+                    String oldIdentifier = newExp.getIdentifier();
+                    int index = oldIdentifier.lastIndexOf('/');
+                    String expCode = oldIdentifier.substring(index + 1);
+                    newExp.setIdentifier(prj.getProject().getIdentifier() + "/" + expCode);
+                    // add new experiment node
+                }
+            }
+            else
+            {
+                // This means the XML contains the connection but not the connected entity.
+                // This is an unlikely scenario.
+                operationLog.info("Connected experiment with permid : " + connectedExpPermId + " is missing");
+            }
+        }
+    }
+
+    /**
+     * Builds the update DTO for a project that already exists on the harvester side.
+     */
+    private ProjectUpdatesDTO createProjectUpdateDTO(NewProject incomingProject, Project project)
+    {
+        ProjectUpdatesDTO update = new ProjectUpdatesDTO();
+        update.setTechId(TechId.create(project));
+        update.setVersion(project.getVersion());
+        update.setDescription(incomingProject.getDescription());
+        // TODO attachments????
+        update.setAttachments(Collections.<NewAttachment> emptyList());
+        ProjectIdentifier parsedIdentifier = ProjectIdentifierFactory.parse(incomingProject.getIdentifier());
+        update.setSpaceCode(parsedIdentifier.getSpaceCode());
+        return update;
+    }
+
+    /**
+     * Registers new samples, collects update DTOs for existing ones, and wires up
+     * container/parent relationships from the incoming connections.
+     * Update DTO creation is deferred until all samples have been visited so that parent
+     * modifications are known for every child sample.
+     */
+    private void processSamples(ResourceListParserData data, AtomicEntityOperationDetailsBuilder builder)
+    {
+        // process samples
+        Map<String, SampleWithConnections> samplesToProcess = data.getSamplesToProcess();
+        Map<SampleIdentifier, NewSample> samplesToUpdate = new HashMap<SampleIdentifier, NewSample>();
+        Set<String> sampleWithUpdatedParents = new HashSet<String>();
+        for (SampleWithConnections sample : samplesToProcess.values())
+        {
+            NewSample incomingSample = sample.getSample();
+            if (sample.getLastModificationDate().after(lastSyncTimestamp))
+            {
+                SampleIdentifier sampleIdentifier = SampleIdentifierFactory.parse(incomingSample);
+                Sample sampleWithExperiment = null;
+                try
+                {
+                    sampleWithExperiment = service.tryGetSampleByPermId(incomingSample.getPermID());
+                } catch (Exception e)
+                {
+                    // doing nothing because when the sample with the perm id is not found
+                    // an exception will be thrown. See the same issue for projects
+                }
+                if (sampleWithExperiment == null)
+                {
+                    // ADD SAMPLE
+                    builder.sample(incomingSample);
+                }
+                else
+                {
+                    // defer creation of sample update objects until all samples have been gone through;
+                    samplesToUpdate.put(sampleIdentifier, incomingSample);
+                    List<Sample> childSamples = getChildSamples(sampleWithExperiment);
+                    for (Sample child : childSamples)
+                    {
+                        String childSampleIdentifier = child.getIdentifier();
+                        SampleWithConnections childSampleWithConns = findChildInSamplesToProcess(childSampleIdentifier, samplesToProcess);
+                        if (childSampleWithConns == null)
+                        {
+                            // TODO Handle sample delete
+                        }
+                        else
+                        {
+                            // the childSample will appear in the incoming samples list anyway
+                            // but we want to make sure its parent modification is handled
+                            NewSample childSample = childSampleWithConns.getSample();
+                            sampleWithUpdatedParents.add(childSample.getIdentifier());
+                        }
+                    }
+                }
+            }
+            for (Connection conn : sample.getConnections())
+            {
+                SampleWithConnections connectedSample = samplesToProcess.get(conn.getToPermId());
+                if (connectedSample == null)
+                {
+                    // The resource list contains the connection but not the connected sample;
+                    // log and skip instead of failing with a NullPointerException.
+                    operationLog.info("Connected sample with permid : " + conn.getToPermId() + " is missing");
+                    continue;
+                }
+                if (conn.getType().equals("Component"))
+                {
+                    NewSample containedSample = connectedSample.getSample();
+                    containedSample.setContainerIdentifier(incomingSample.getIdentifier());
+                }
+                else if (conn.getType().equals("Child"))
+                {
+                    NewSample childSample = connectedSample.getSample();
+                    String[] parents = childSample.getParentsOrNull();
+                    List<String> parentIds = null;
+                    if (parents == null)
+                    {
+                        parentIds = new ArrayList<String>();
+                    }
+                    else
+                    {
+                        parentIds = new ArrayList<String>(Arrays.asList(parents));
+                    }
+                    parentIds.add(incomingSample.getIdentifier());
+                    childSample.setParentsOrNull(parentIds.toArray(new String[parentIds.size()]));
+                }
+                // TODO how about Connection Type
+                // else if (conn.getType().equals("Connection")) // TODO not sure if this guarantees that we have a dataset in the toPermId
+                // {
+                // NewExternalData externalData = dataSetsToCreate.get(conn.getToPermId()).getDataSet();
+                // externalData.setSampleIdentifierOrNull(new SampleIdentifier(newSmp.getIdentifier()));
+                // }
+            }
+        }
+
+        // create sample update dtos for the samples that need to be updated
+        for (SampleIdentifier sampleIdentifier : samplesToUpdate.keySet())
+        {
+            NewSample newSmp = samplesToUpdate.get(sampleIdentifier);
+            Sample sampleWithExperiment = service.tryGetSampleByPermId(newSmp.getPermID());
+
+            TechId sampleId = TechId.create(sampleWithExperiment);
+            ExperimentIdentifier experimentIdentifier = getExperimentIdentifier(newSmp);
+            ProjectIdentifier projectIdentifier = getProjectIdentifier(newSmp);
+            String[] modifiedParentIds = newSmp.getParentsOrNull();
+            if (modifiedParentIds == null)
+            {
+                // Presumably an empty array signals "parents changed" to the update DTO even
+                // when the incoming sample carries no explicit parent list - TODO confirm.
+                if (sampleWithUpdatedParents.contains(newSmp.getIdentifier()))
+                {
+                    modifiedParentIds = new String[0];
+                }
+            }
+            String containerIdentifier = getContainerIdentifier(newSmp);
+
+            SampleUpdatesDTO updates =
+                    new SampleUpdatesDTO(sampleId, Arrays.asList(newSmp.getProperties()), experimentIdentifier,
+                            projectIdentifier, Collections.<NewAttachment> emptyList(),
+                            sampleWithExperiment.getVersion(), sampleIdentifier, containerIdentifier,
+                            modifiedParentIds);
+            builder.sampleUpdate(updates);
+        }
+    }
+
+    /**
+     * Returns the container identifier of the given sample, or <code>null</code> when it has none.
+     */
+    private String getContainerIdentifier(NewSample newSmp)
+    {
+        // The previous "x == null ? null : x" ternary was a no-op; return the value directly.
+        return newSmp.getContainerIdentifier();
+    }
+
+    /**
+     * Parses the experiment identifier of the sample, or returns <code>null</code> when the
+     * sample is not attached to an experiment.
+     */
+    private ExperimentIdentifier getExperimentIdentifier(NewSample newSmp)
+    {
+        String expIdentifier = newSmp.getExperimentIdentifier();
+        return expIdentifier == null ? null : ExperimentIdentifierFactory.parse(expIdentifier);
+    }
+
+    /**
+     * Parses the project identifier of the sample, or returns <code>null</code> when the
+     * sample is not attached to a project.
+     */
+    private ProjectIdentifier getProjectIdentifier(NewSample sample)
+    {
+        String prjIdentifier = sample.getProjectIdentifier();
+        return prjIdentifier == null ? null : ProjectIdentifierFactory.parse(prjIdentifier);
+    }
+
+    /** Lists all samples that have the given sample as a parent. */
+    private List<Sample> getChildSamples(Sample sampleWithExperiment)
+    {
+        TechId parentId = new TechId(sampleWithExperiment.getId());
+        return service.listSamples(ListSampleCriteria.createForParent(parentId));
+    }
+
+    /**
+     * Linear scan of the incoming samples for the one with the given identifier; returns
+     * <code>null</code> when no such sample is in the list.
+     */
+    private SampleWithConnections findChildInSamplesToProcess(String childSampleIdentifier, Map<String, SampleWithConnections> samplesToProcess)
+    {
+        for (SampleWithConnections candidate : samplesToProcess.values())
+        {
+            if (candidate.getSample().getIdentifier().equals(childSampleIdentifier))
+            {
+                return candidate;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Task executor that registers a single incoming data set on the harvester by running a
+     * {@link DataSetRegistrationIngestionService} and inspecting the resulting report.
+     */
+    private final class DataSetRegistrationTaskExecutor implements ITaskExecutor<DataSetWithConnections>
+    {
+        // Codes of data sets that failed to register; shared with (and read by) the caller.
+        private final List<String> notRegisteredDataSetCodes;
+
+        public DataSetRegistrationTaskExecutor(List<String> notRegisteredDataSetCodes)
+        {
+            this.notRegisteredDataSetCodes = notRegisteredDataSetCodes;
+        }
+
+        @Override
+        public Status execute(DataSetWithConnections dataSet)
+        {
+            // Log through the operation log instead of the former System.out debug print.
+            operationLog.info("start " + dataSet.getDataSet().getCode());
+
+            Properties props = setProperties();
+
+            DataSetRegistrationIngestionService ingestionService =
+                    new DataSetRegistrationIngestionService(props, storeRoot, notRegisteredDataSetCodes, dataSet.getDataSet(), operationLog);
+            TableModel resultTable = ingestionService.createAggregationReport(new HashMap<String, Object>(), context);
+            if (resultTable != null)
+            {
+                // An "Error..." column in the report signals a failed registration.
+                // (The original sized a throwaway String[] just to obtain headers.size().)
+                List<TableModelColumnHeader> headers = resultTable.getHeader();
+                for (int i = 0; i < headers.size(); i++)
+                {
+                    if (headers.get(i).getTitle().startsWith("Error"))
+                    {
+                        // NOTE(review): the message is always taken from row 0, independent of the
+                        // error column's index - assumes the report has a single row; confirm.
+                        String message = resultTable.getRows().get(0).getValues().toString();
+                        notRegisteredDataSetCodes.add(dataSet.getDataSet().getCode());
+                        operationLog.error(message);
+                        return Status.createError(message);
+                    }
+                }
+            }
+            return Status.OK;
+        }
+
+        /** Builds the configuration handed to {@link DataSetRegistrationIngestionService}. */
+        private Properties setProperties()
+        {
+            Properties props = new Properties();
+            props.setProperty("user", EntitySynchronizer.this.config.getUser());
+            props.setProperty("pass", EntitySynchronizer.this.config.getPassword());
+            props.setProperty("as-url", EntitySynchronizer.this.config.getDataSourceOpenbisURL());
+            props.setProperty("dss-url", EntitySynchronizer.this.config.getDataSourceDSSURL());
+            props.setProperty("harvester-temp-dir", EntitySynchronizer.this.config.getHarvesterTempDir());
+            props.setProperty("do-not-create-original-dir", "true");
+            return props;
+        }
+    }
+
+    /**
+     * Compares the file trees of the same data set on the data source and on the harvester.
+     * Two data sets match when both sides have the same number of file nodes and the nodes,
+     * sorted by path, yield the same (path, CRC32, length) digest.
+     *
+     * @return <code>true</code> when both sides have identical file listings
+     */
+    private boolean deepCompareDataSets(String dataSetCode)
+            throws NoSuchAlgorithmException, UnsupportedEncodingException
+    {
+        // get the file nodes in the incoming DS by querying the data source openbis
+        String asUrl = config.getDataSourceOpenbisURL();
+        String dssUrl = config.getDataSourceDSSURL();
+
+        DSSFileUtils dssFileUtils = DSSFileUtils.create(asUrl, dssUrl);
+        String sessionToken = dssFileUtils.login(config.getUser(), config.getPassword());
+
+        DataSetFileSearchCriteria criteria = new DataSetFileSearchCriteria();
+        criteria.withDataSet().withCode().thatEquals(dataSetCode);
+        SearchResult<DataSetFile> result = dssFileUtils.searchFiles(sessionToken, criteria, new DataSetFileFetchOptions());
+
+        // get the file nodes in the harvester openbis
+        IDataStoreServerApi dssharvester = (IDataStoreServerApi) ServiceProvider.getDssServiceV3().getService();
+        SearchResult<DataSetFile> resultHarvester =
+                dssharvester.searchFiles(ServiceProvider.getOpenBISService().getSessionToken(), criteria, new DataSetFileFetchOptions());
+        // A differing node count means the trees cannot match; skip the hash computation.
+        if (result.getTotalCount() != resultHarvester.getTotalCount())
+        {
+            return false;
+        }
+        List<DataSetFile> dsNodes = result.getObjects();
+        List<DataSetFile> harvesterNodes = resultHarvester.getObjects();
+        // Sort both sides by path so the concatenation-based hash is order-independent.
+        sortFileNodes(dsNodes);
+        sortFileNodes(harvesterNodes);
+        return calculateHash(dsNodes).equals(calculateHash(harvesterNodes));
+    }
+
+    /** Sorts the data set file nodes in place by their path. */
+    private void sortFileNodes(List<DataSetFile> nodes)
+    {
+        Comparator<DataSetFile> byPath = new Comparator<DataSetFile>()
+            {
+                @Override
+                public int compare(DataSetFile f1, DataSetFile f2)
+                {
+                    return f1.getPath().compareTo(f2.getPath());
+                }
+            };
+        Collections.sort(nodes, byPath);
+    }
+
+    /**
+     * Computes an MD5 digest over (path, CRC32 checksum, file length) of each node, in list
+     * order. Used only for change detection, not for security, so MD5 is acceptable here.
+     *
+     * @return the digest as a hex string
+     */
+    private String calculateHash(List<DataSetFile> nodes) throws NoSuchAlgorithmException, UnsupportedEncodingException
+    {
+        // StringBuilder instead of StringBuffer: no synchronization is needed for a local buffer.
+        StringBuilder sb = new StringBuilder();
+        for (DataSetFile dataSetFile : nodes)
+        {
+            sb.append(dataSetFile.getPath());
+            sb.append(dataSetFile.getChecksumCRC32());
+            sb.append(dataSetFile.getFileLength());
+        }
+        byte[] digest = MessageDigest.getInstance("MD5").digest(sb.toString().getBytes("UTF-8"));
+        return new String(Hex.encodeHex(digest));
+    }
+
+    /**
+     * Creates a batch-update DTO for an existing harvester data set, replacing its
+     * properties with the property values of the incoming data set.
+     */
+    private DataSetBatchUpdatesDTO createDataSetBatchUpdateDTO(NewExternalData childDS, AbstractExternalData dsInHarvester)
+    {
+        ch.systemsx.cisd.etlserver.registrator.api.v1.impl.DataSetUpdatable updateUpdatable = new
+                ch.systemsx.cisd.etlserver.registrator.api.v1.impl.DataSetUpdatable(dsInHarvester, service);
+        DataSetBatchUpdatesDTO dsBatchUpdatesDTO = ConversionUtils.convertToDataSetBatchUpdatesDTO(updateUpdatable);
+        dsBatchUpdatesDTO.setDatasetId(TechId.create(dsInHarvester));
+        // Copy the incoming property values onto the update DTO.
+        List<IEntityProperty> entityProperties = new ArrayList<IEntityProperty>();
+        for (NewProperty prop : childDS.getDataSetProperties())
+        {
+            String propertyCode = prop.getPropertyCode();
+            String value = prop.getValue();
+            entityProperties.add(new PropertyBuilder(propertyCode).value(value).getProperty());
+        }
+        dsBatchUpdatesDTO.setProperties(entityProperties);
+        return dsBatchUpdatesDTO;
+    }
+
+    /** Creates a data set directory provider rooted at the DSS store root. */
+    private IDataSetDirectoryProvider getDirectoryProvider()
+    {
+        File storeRootDir = getConfigProvider().getStoreRoot();
+        return new DataSetDirectoryProvider(storeRootDir, getShareIdManager());
+    }
+
+    /** @return the DSS config provider, looked up via {@link ServiceProvider}. */
+    private IConfigProvider getConfigProvider()
+    {
+        return ServiceProvider.getConfigProvider();
+    }
+
+    /** @return the share id manager, looked up via {@link ServiceProvider}. */
+    private IShareIdManager getShareIdManager()
+    {
+        return ServiceProvider.getShareIdManager();
+    }
+
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/MasterDataParser.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/MasterDataParser.java
new file mode 100644
index 00000000000..24908c3698f
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/MasterDataParser.java
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.DataType;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IMasterDataRegistrationTransaction;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IPropertyAssignment;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IPropertyType;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.ISampleType;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IVocabulary;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IVocabularyTerm;
+
+/**
+ * Parses the master data section (vocabularies, property types and sample types) of a
+ * resource list and registers or updates the corresponding entities through an
+ * {@link IMasterDataRegistrationTransaction}.
+ *
+ * @author Ganime Betul Akin
+ */
+public class MasterDataParser
+{
+    private final IMasterDataRegistrationTransaction masterDataRegistrationTransaction;
+
+    // Property types parsed so far, keyed by code; looked up when assigning them to sample types.
+    private Map<String, IPropertyType> propertyTypeMap = new HashMap<String, IPropertyType>();
+
+    // Vocabularies parsed so far, keyed by code; looked up for vocabulary property types.
+    private Map<String, IVocabulary> vocabularyMap = new HashMap<String, IVocabulary>();
+
+    /**
+     * @param masterDataRegistrationTransaction the transaction all parsed entities are registered with
+     */
+    public MasterDataParser(IMasterDataRegistrationTransaction masterDataRegistrationTransaction)
+    {
+        this.masterDataRegistrationTransaction = masterDataRegistrationTransaction;
+    }
+
+    /**
+     * Locates the masterData element following the s:loc element with the given URI and
+     * parses its vocabularies, property types and sample types.
+     *
+     * @throws XPathExpressionException when no master data element is found for the URI
+     */
+    public void parseMasterData(Document doc, XPath xpath, String uri) throws XPathExpressionException
+    {
+        XPathExpression expr =
+                xpath.compile("//s:url/s:loc[normalize-space(.)='" + uri + "']//following-sibling::*[local-name() = 'masterData'][1]");
+        Node xdNode = (Node) expr.evaluate(doc, XPathConstants.NODE);
+        if (xdNode == null)
+        {
+            throw new XPathExpressionException("The master data resource list should contain 1 master data element");
+        }
+        Element docElement = (Element) xdNode;
+
+        // Parse order matters: property types may reference vocabularies, and sample type
+        // assignments reference property types.
+        parseVocabularies(docElement.getElementsByTagName("vocabularies"));
+        parsePropertyTypes(docElement.getElementsByTagName("propertyTypes"));
+        parseSampleTypes(docElement.getElementsByTagName("sampleTypes"));
+    }
+
+    private void parseVocabularies(NodeList vocabulariesNode)
+    {
+        if (vocabulariesNode.getLength() == 1)
+        {
+            Element vocabsElement = (Element) vocabulariesNode.item(0);
+            NodeList vocabNodes = vocabsElement.getElementsByTagName("vocabulary");
+            for (int i = 0; i < vocabNodes.getLength(); i++)
+            {
+                Element vocabElement = (Element) vocabNodes.item(i);
+                String code = getAttribute(vocabElement, "code");
+                // Internal ($-prefixed) vocabularies are not synchronized.
+                if (code.startsWith("$"))
+                {
+                    continue;
+                }
+                // TODO complete other attributes
+                IVocabulary newVocabulary = masterDataRegistrationTransaction.getOrCreateNewVocabulary(code);
+                newVocabulary.setDescription(getAttribute(vocabElement, "description"));
+                newVocabulary.setUrlTemplate(getAttribute(vocabElement, "urlTemplate"));
+                newVocabulary.setInternalNamespace(Boolean.valueOf(getAttribute(vocabElement, "internalNamespace")));
+                newVocabulary.setManagedInternally(Boolean.valueOf(getAttribute(vocabElement, "managedInternally")));
+                newVocabulary.setChosenFromList(Boolean.valueOf(getAttribute(vocabElement, "chosenFromList")));
+                vocabularyMap.put(code, newVocabulary);
+                parseVocabularyTerms(vocabElement, newVocabulary);
+            }
+        }
+    }
+
+    private void parseVocabularyTerms(Element vocabElement, IVocabulary newVocabulary)
+    {
+        NodeList termNodes = vocabElement.getElementsByTagName("term");
+        for (int i = 0; i < termNodes.getLength(); i++)
+        {
+            Element termElement = (Element) termNodes.item(i);
+            String code = getAttribute(termElement, "code");
+            IVocabularyTerm newVocabularyTerm = masterDataRegistrationTransaction.createNewVocabularyTerm(code);
+            newVocabularyTerm.setLabel(getAttribute(termElement, "label"));
+            newVocabularyTerm.setDescription(getAttribute(termElement, "description"));
+            newVocabularyTerm.setOrdinal(Long.valueOf(getAttribute(termElement, "ordinal")));
+            // TODO setUrl?
+            newVocabulary.addTerm(newVocabularyTerm);
+        }
+    }
+
+    /**
+     * Returns the text content of the named attribute, or <code>null</code> when the attribute
+     * is missing. (The previous version dereferenced the attribute node unconditionally and
+     * threw a NullPointerException for missing attributes.)
+     */
+    private String getAttribute(Element element, String attr)
+    {
+        Node attributeNode = element.getAttributes().getNamedItem(attr);
+        return attributeNode == null ? null : attributeNode.getTextContent();
+    }
+
+    private void parseSampleTypes(NodeList sampleTypesNode)
+    {
+        if (sampleTypesNode.getLength() == 1)
+        {
+            Element sampleTypesElement = (Element) sampleTypesNode.item(0);
+            NodeList sampleTypeNodes = sampleTypesElement.getElementsByTagName("sampleType");
+            for (int i = 0; i < sampleTypeNodes.getLength(); i++)
+            {
+                Element sampleTypeElement = (Element) sampleTypeNodes.item(i);
+                String code = getAttribute(sampleTypeElement, "code");
+                ISampleType newSampleType = masterDataRegistrationTransaction.getOrCreateNewSampleType(code);
+                newSampleType.setGeneratedCodePrefix("S");
+
+                handlePropertyAssignments(newSampleType, sampleTypeElement.getElementsByTagName("propertyAssignments"));
+            }
+        }
+    }
+
+    private void handlePropertyAssignments(ISampleType newSampleType, NodeList propertyAssignmentsNode)
+    {
+        if (propertyAssignmentsNode.getLength() == 1)
+        {
+            Element propertyAssignmentsElement = (Element) propertyAssignmentsNode.item(0);
+            // NOTE(review): "propertyAssigment" (sic) must match the element name produced by the
+            // data source's resource list - do not "fix" the spelling without checking the producer.
+            NodeList propertyAssignmentNodes = propertyAssignmentsElement.getElementsByTagName("propertyAssigment");
+            for (int i = 0; i < propertyAssignmentNodes.getLength(); i++)
+            {
+                Element propertyAssignmentElement = (Element) propertyAssignmentNodes.item(i);
+                // TODO set other attributes
+                String propertyTypeCode = getAttribute(propertyAssignmentElement, "property_type_code");
+                // Internal ($-prefixed) property types are not synchronized.
+                if (propertyTypeCode.startsWith("$"))
+                {
+                    continue;
+                }
+                boolean mandatory = Boolean.valueOf(getAttribute(propertyAssignmentElement, "mandatory"));
+                String section = getAttribute(propertyAssignmentElement, "section");
+                // (Removed unused reads of the "data_type_code" and "ordinal" attributes.)
+
+                IPropertyType propertyType = propertyTypeMap.get(propertyTypeCode);
+                if (propertyType != null)
+                {
+                    IPropertyAssignment assignment =
+                            masterDataRegistrationTransaction.assignPropertyType(newSampleType, propertyType);
+                    assignment.setMandatory(mandatory);
+                    assignment.setSection(section);
+                }
+            }
+        }
+    }
+
+    private void parsePropertyTypes(NodeList propertyTypesNode)
+    {
+        if (propertyTypesNode.getLength() == 1)
+        {
+            Element propertyTypesElement = (Element) propertyTypesNode.item(0);
+            NodeList propertyTypeNodes = propertyTypesElement.getElementsByTagName("propertyType");
+            for (int i = 0; i < propertyTypeNodes.getLength(); i++)
+            {
+                Element propertyTypeElement = (Element) propertyTypeNodes.item(i);
+                String code = getAttribute(propertyTypeElement, "code");
+                // TODO handle internal properties
+                if (code.startsWith("$"))
+                {
+                    continue;
+                }
+                String label = getAttribute(propertyTypeElement, "label");
+                String dataType = getAttribute(propertyTypeElement, "dataType");
+                String description = getAttribute(propertyTypeElement, "description");
+                boolean internalNamespace = Boolean.valueOf(getAttribute(propertyTypeElement, "internalNamespace"));
+                boolean managedInternally = Boolean.valueOf(getAttribute(propertyTypeElement, "managedInternally"));
+                // "vocabulary" is optional; getAttribute returns null when it is absent.
+                String vocabulary = getAttribute(propertyTypeElement, "vocabulary");
+
+                IPropertyType newPropertyType = masterDataRegistrationTransaction.getOrCreateNewPropertyType(code, DataType.valueOf(dataType));
+                propertyTypeMap.put(code, newPropertyType);
+                newPropertyType.setInternalNamespace(internalNamespace);
+                newPropertyType.setManagedInternally(managedInternally);
+                newPropertyType.setLabel(label);
+                newPropertyType.setDescription(description);
+                if (vocabulary != null)
+                {
+                    newPropertyType.setVocabulary(vocabularyMap.get(vocabulary));
+                }
+                // TODO handle the case for property types that are of data type material
+            }
+        }
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParser.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParser.java
new file mode 100644
index 00000000000..3b7e3f8b4a3
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParser.java
@@ -0,0 +1,519 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.xml.XMLConstants;
+import javax.xml.namespace.NamespaceContext;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.DataSetKind;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.Connection;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.DataSetWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.ExperimentWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.MaterialWithLastModificationDate;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.ProjectWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.ResourceListParserData.SampleWithConnections;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator.DefaultNameTranslator;
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator.INameTranslator;
+import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.IMasterDataRegistrationTransaction;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataSetType;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.EntityProperty;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IEntityProperty;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewAttachment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewExperiment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewMaterialWithType;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewProject;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewSample;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.PropertyType;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.SampleType;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewContainerDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewProperty;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ProjectIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier;
+
+/**
+ * Parses the ResourceSync resource list (sitemap) XML document published by an openBIS
+ * data source and collects the entities to be synchronized — projects, experiments,
+ * samples, data sets and materials — into a {@link ResourceListParserData} instance.
+ * Master data referenced by the list is registered through the supplied
+ * {@link IMasterDataRegistrationTransaction} as a side effect of parsing.
+ *
+ * @author Ganime Betul Akin
+ */
+public class ResourceListParser
+{
+    /** Accumulator for everything parsed out of the resource list document. */
+    private final ResourceListParserData data;
+
+    /** Translates space names coming from the data source into harvester-local names. */
+    private final INameTranslator nameTranslator;
+
+    /** Code of the data store that parsed data sets are assigned to. */
+    private final String dataStoreCode;
+
+    /** Transaction used by {@link #parseMasterData(Document, XPath, String)}. */
+    private final IMasterDataRegistrationTransaction masterDataRegistrationTransaction;
+
+    public INameTranslator getNameTranslator()
+    {
+        return nameTranslator;
+    }
+
+    private ResourceListParser(INameTranslator nameTranslator, String dataStoreCode,
+            IMasterDataRegistrationTransaction masterDataRegistrationTransaction)
+    {
+        this.data = new ResourceListParserData();
+        this.nameTranslator = nameTranslator;
+        this.dataStoreCode = dataStoreCode;
+        this.masterDataRegistrationTransaction = masterDataRegistrationTransaction;
+    }
+
+    /**
+     * Creates a parser using the given name translator. Falls back to a
+     * {@link DefaultNameTranslator} when <code>nameTranslator</code> is <code>null</code>.
+     */
+    public static ResourceListParser create(INameTranslator nameTranslator, String dataStoreCode,
+            IMasterDataRegistrationTransaction masterDataRegistrationTransaction)
+    {
+        if (nameTranslator == null)
+        {
+            return create(dataStoreCode, masterDataRegistrationTransaction);
+        }
+        return new ResourceListParser(nameTranslator, dataStoreCode, masterDataRegistrationTransaction);
+    }
+
+    /** Creates a parser that leaves all names unchanged ({@link DefaultNameTranslator}). */
+    public static ResourceListParser create(String dataStoreCode, IMasterDataRegistrationTransaction masterDataRegistrationTransaction)
+    {
+        return create(new DefaultNameTranslator(), dataStoreCode, masterDataRegistrationTransaction);
+    }
+
+    /**
+     * Entry point. Reads the resource list timestamp, then parses the metadata of every
+     * resource location found in the document into the shared {@link ResourceListParserData}.
+     *
+     * @param doc the resource list (sitemap) document
+     * @return the populated parser data
+     * @throws XPathExpressionException on malformed XPath results or unparsable dates
+     */
+    public ResourceListParserData parseResourceListDocument(Document doc) throws XPathExpressionException
+    {
+        XPath xpath = XPathFactory.newInstance().newXPath();
+        // Bind the prefixes used in the XPath expressions below: "s" = sitemap,
+        // "rs" = ResourceSync, "x" = openBIS xdterms extension namespace.
+        xpath.setNamespaceContext(new NamespaceContext()
+            {
+                public String getNamespaceURI(String prefix)
+                {
+                    if (prefix == null)
+                        throw new NullPointerException("Null prefix");
+                    else if ("s".equals(prefix))
+                        return "http://www.sitemaps.org/schemas/sitemap/0.9";
+                    else if ("rs".equals(prefix))
+                        return "http://www.openarchives.org/rs/terms/";
+                    else if ("x".equals(prefix))
+                        return "https://sis.id.ethz.ch/software/#openbis/xdterms/";
+                    else if ("xml".equals(prefix))
+                        return XMLConstants.XML_NS_URI;
+                    return XMLConstants.NULL_NS_URI;
+                }
+
+                // This method isn't necessary for XPath processing.
+                public String getPrefix(String uri)
+                {
+                    throw new UnsupportedOperationException("Not implemented!!!");
+                }
+
+                // This method isn't necessary for XPath processing either.
+                public Iterator<?> getPrefixes(String uri)
+                {
+                    throw new UnsupportedOperationException("Not implemented!!!");
+                }
+            });
+        Date resourceListTimestamp = getResourceListTimestamp(doc, xpath);
+        data.setResourceListTimestamp(resourceListTimestamp);
+
+        List<String> uris = getResourceLocations(doc, xpath);
+        for (String uri : uris)
+        {
+            parseUriMetaData(doc, xpath, uri);
+        }
+
+        return data;
+    }
+
+    /**
+     * Reads the "at" attribute of the top-level <code>rs:md</code> element, i.e. the time
+     * the resource list was produced on the data source.
+     */
+    private Date getResourceListTimestamp(Document doc, XPath xpath) throws XPathExpressionException
+    {
+        XPathExpression expr = xpath.compile("*[name() = 'urlset']/*[name() = 'rs:md']");
+        Node mdNode = (Node) expr.evaluate(doc, XPathConstants.NODE);
+        String timestamp = mdNode.getAttributes().getNamedItem("at").getTextContent();
+        try
+        {
+            return convertFromW3CDate(timestamp);
+        } catch (ParseException e)
+        {
+            throw new XPathExpressionException("Last modification date cannot be parsed:" + timestamp);
+        }
+    }
+
+    /**
+     * Collects the metadata resource locations (URLs ending in "/M") to be parsed.
+     * Side effect: when the master data URL is encountered it is parsed immediately via
+     * {@link #parseMasterData(Document, XPath, String)} and is NOT added to the returned list.
+     */
+    private List<String> getResourceLocations(Document doc, XPath xpath) throws XPathExpressionException
+    {
+        XPathExpression expr = xpath.compile("/s:urlset/s:url/s:loc");// "//*[local-name()='loc']/text()"); //"//s:loc/text()"
+
+        Object result = expr.evaluate(doc, XPathConstants.NODESET);
+        NodeList nodes = (NodeList) result;
+        List<String> list = new ArrayList<String>();
+        for (int i = 0; i < nodes.getLength(); i++)
+        {
+            String uri = nodes.item(i).getTextContent();
+            if (uri.endsWith("MASTER_DATA/MASTER_DATA/M"))
+            {
+                parseMasterData(doc, xpath, uri);
+            }
+            else if (uri.endsWith("/M"))
+            {
+                list.add(uri);
+            }
+        }
+        return list;
+    }
+
+    /** Delegates master data registration to {@link MasterDataParser}. */
+    private void parseMasterData(Document doc, XPath xpath, String uri) throws XPathExpressionException
+    {
+        MasterDataParser mdParser = new MasterDataParser(masterDataRegistrationTransaction);
+        mdParser.parseMasterData(doc, xpath, uri);
+    }
+
+    /**
+     * Parses the metadata of a single resource: extracts its last modification date and its
+     * <code>x:xd</code> element, then dispatches on the "kind" attribute (PROJECT, EXPERIMENT,
+     * SAMPLE, DATA_SET, MATERIAL). Unknown kinds are silently ignored.
+     */
+    private void parseUriMetaData(Document doc, XPath xpath, String uri) throws XPathExpressionException
+    {
+        Date lastModificationDate = extractLastModificationDate(doc, xpath, uri);
+
+        Node xdNode = extractXdNode(doc, xpath, uri);
+        String entityKind = xdNode.getAttributes().getNamedItem("kind").getTextContent();
+
+        if ("PROJECT".equals(entityKind))
+        {
+            parseProjectMetaData(xpath, extractPermIdFromURI(uri), xdNode, lastModificationDate);
+        }
+        else if ("EXPERIMENT".equals(entityKind))
+        {
+            parseExperimentMetaData(xpath, extractPermIdFromURI(uri), xdNode, lastModificationDate);
+        }
+        else if ("SAMPLE".equals(entityKind))
+        {
+            parseSampleMetaData(xpath, extractPermIdFromURI(uri), xdNode, lastModificationDate);
+        }
+        else if ("DATA_SET".equals(entityKind))
+        {
+            parseDataSetMetaData(xpath, extractDataSetCodeFromURI(uri), xdNode, lastModificationDate);
+        }
+        else if ("MATERIAL".equals(entityKind))
+        {
+            parseMaterialMetaData(xpath, extractMaterialCodeFromURI(uri), xdNode, lastModificationDate);
+        }
+    }
+
+    /** Reads the sibling <code>s:lastmod</code> element of the given resource location. */
+    private Date extractLastModificationDate(Document doc, XPath xpath, String uri) throws XPathExpressionException
+    {
+        XPathExpression expr = xpath.compile("//s:url/s:loc[normalize-space(.)='" + uri + "']//following-sibling::s:lastmod[1]");
+        Node lastModNode = (Node) expr.evaluate(doc, XPathConstants.NODE);
+        if (lastModNode == null)
+        {
+            throw new XPathExpressionException("The resource list should contain 1 lastmod element per resource");
+        }
+
+        String lastModDataStr = lastModNode.getTextContent().trim();
+        try
+        {
+            return convertFromW3CDate(lastModDataStr);
+        } catch (ParseException e)
+        {
+            throw new XPathExpressionException("Last modification date cannot be parsed:" + lastModDataStr);
+        }
+    }
+
+    // NOTE(review): the pattern carries no time zone designator and GMT is assumed. A W3C
+    // datetime with an explicit offset (e.g. a trailing "Z" or "+01:00") or fractional seconds
+    // would fail to parse here — confirm the exact format the data source publishes.
+    private Date convertFromW3CDate(String lastModDataStr) throws ParseException
+    {
+        DateFormat df1 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
+        df1.setTimeZone(TimeZone.getTimeZone("GMT"));
+        return df1.parse(lastModDataStr);
+    }
+
+    /** Reads the sibling <code>x:xd</code> (entity description) element of the given location. */
+    private Node extractXdNode(Document doc, XPath xpath, String uri) throws XPathExpressionException
+    {
+        // alternative expression: //s:url/s:loc[normalize-space(.)='" + uri + "']/../x:xd");
+        XPathExpression expr = xpath.compile("//s:url/s:loc[normalize-space(.)='" + uri + "']//following-sibling::x:xd[1]");
+        Node xdNode = (Node) expr.evaluate(doc, XPathConstants.NODE);
+        if (xdNode == null)
+        {
+            throw new XPathExpressionException("The resource list should contain 1 xd element per resource");
+        }
+        return xdNode;
+    }
+
+    /**
+     * Registers a data set (keyed by perm id) for processing.
+     * NOTE(review): the CONTAINER and PHYSICAL branches are identical apart from the type
+     * instantiated; for any other kind (e.g. LINK) the blank NewExternalData created below is
+     * registered without code/type/store being set — confirm whether LINK data sets can occur.
+     */
+    private void parseDataSetMetaData(XPath xpath, String permId, Node xdNode, Date lastModificationDate)
+    {
+        String code = extractCode(xdNode);
+        String sample = extractAttribute(xdNode, "sample");
+        String experiment = extractAttribute(xdNode, "experiment");
+        String type = extractType(xdNode);
+        String dsKind = extractAttribute(xdNode, "dsKind");
+        NewExternalData ds = new NewExternalData();
+        if (dsKind.equals(DataSetKind.CONTAINER.toString()))
+        {
+            ds = new NewContainerDataSet();
+            ds.setCode(code);
+            ds.setDataSetType(new DataSetType(type));
+            ds.setDataStoreCode(this.dataStoreCode);
+            if (sample.trim().equals("") == false)
+            {
+                ds.setSampleIdentifierOrNull(getSampleIdentifier(sample));
+            }
+            if (experiment.trim().equals("") == false)
+            {
+                ds.setExperimentIdentifierOrNull(getExperimentIdentifier(experiment));
+            }
+        }
+        else if (dsKind.equals(DataSetKind.PHYSICAL.toString()))
+        {
+            ds.setCode(code);
+            ds.setDataSetType(new DataSetType(type));
+            ds.setDataStoreCode(this.dataStoreCode);
+            if (sample.trim().equals("") == false)
+            {
+                ds.setSampleIdentifierOrNull(getSampleIdentifier(sample));
+            }
+            if (experiment.trim().equals("") == false)
+            {
+                ds.setExperimentIdentifierOrNull(getExperimentIdentifier(experiment));
+            }
+        }
+        DataSetWithConnections newDsWithConns = data.new DataSetWithConnections(ds, lastModificationDate);
+        data.getDataSetsToProcess().put(permId, newDsWithConns);
+        newDsWithConns.setConnections(parseConnections(xpath, xdNode));
+        ds.setDataSetProperties(parseDataSetProperties(xpath, xdNode));
+    }
+
+    // NOTE(review): throws a NullPointerException when the attribute is absent on the node.
+    private String extractAttribute(Node xdNode, String attrName)
+    {
+        return xdNode.getAttributes().getNamedItem(attrName).getTextContent();
+    }
+
+    private String extractCode(Node xdNode)
+    {
+        return extractAttribute(xdNode, "code");
+    }
+
+    /** Rebuilds the sample identifier with the space code run through the name translator. */
+    private SampleIdentifier getSampleIdentifier(String sampleIdentifierStr)
+    {
+        SampleIdentifier sampleIdentifier = SampleIdentifierFactory.parse(sampleIdentifierStr);
+        SpaceIdentifier spaceLevel = sampleIdentifier.getSpaceLevel();
+        String originalSpaceCode = spaceLevel.getSpaceCode();
+        return new SampleIdentifier(new SpaceIdentifier(nameTranslator.translate(originalSpaceCode)), sampleIdentifier.getSampleCode());
+    }
+
+    /** Rebuilds the experiment identifier with the space code run through the name translator. */
+    private ExperimentIdentifier getExperimentIdentifier(String experiment)
+    {
+        ExperimentIdentifier experimentIdentifier = ExperimentIdentifierFactory.parse(experiment);
+        String originalSpaceCode = experimentIdentifier.getSpaceCode();
+        String projectCode = experimentIdentifier.getProjectCode();
+        String expCode = experimentIdentifier.getExperimentCode();
+        return new ExperimentIdentifier(new ProjectIdentifier(nameTranslator.translate(originalSpaceCode), projectCode), expCode);
+    }
+
+    /** Registers a project (keyed by perm id) together with its connections. */
+    private void parseProjectMetaData(XPath xpath, String permId, Node xdNode, Date lastModificationDate)
+    {
+
+        String code = extractCode(xdNode);
+        String desc = xdNode.getAttributes().getNamedItem("desc").getTextContent();
+        String space = extractSpace(xdNode);
+        // TODO is there a better way to create project identifier below?
+        NewProject newProject = new NewProject("/" + nameTranslator.translate(space) + "/" + code, desc);
+        newProject.setPermID(permId);
+        ProjectWithConnections newPrjWithConns =
+                data.new ProjectWithConnections(newProject, lastModificationDate);
+        data.getProjectsToProcess().put(permId, newPrjWithConns);
+        newPrjWithConns.setConnections(parseConnections(xpath, xdNode));
+    }
+
+    /** Registers a material. Note: materials are keyed by code, not by perm id. */
+    private void parseMaterialMetaData(XPath xpath, String permId, Node xdNode, Date lastModificationDate)
+    {
+        String code = extractCode(xdNode);
+        String type = extractType(xdNode);
+        NewMaterialWithType newMaterial = new NewMaterialWithType(code, type);
+        MaterialWithLastModificationDate materialWithLastModDate =
+                data.new MaterialWithLastModificationDate(newMaterial, lastModificationDate);
+        data.getMaterialsToProcess().put(code, materialWithLastModDate);
+        newMaterial.setProperties(parseProperties(xpath, xdNode));
+    }
+
+    /** Reads the x:connections/x:connection children of the entity node (may be empty). */
+    private List<Connection> parseConnections(XPath xpath, Node xdNode)
+    {
+        List<Connection> conns = new ArrayList<Connection>();
+        Element docElement = (Element) xdNode;
+        NodeList connsNode = docElement.getElementsByTagName("x:connections");
+        if (connsNode.getLength() == 1)
+        {
+            Element connsElement = (Element) connsNode.item(0);
+            NodeList connNodes = connsElement.getElementsByTagName("x:connection");
+            for (int i = 0; i < connNodes.getLength(); i++)
+            {
+                String to = connNodes.item(i).getAttributes().getNamedItem("to").getTextContent();
+                String type = connNodes.item(i).getAttributes().getNamedItem("type").getTextContent();
+                conns.add(data.new Connection(to, type));
+            }
+        }
+        return conns;
+    }
+
+    /** Reads x:properties/x:property pairs as data set properties (may be empty). */
+    private List<NewProperty> parseDataSetProperties(XPath xpath, Node xdNode)
+    {
+
+        List<NewProperty> dsProperties = new ArrayList<NewProperty>();
+        Element docElement = (Element) xdNode;
+        NodeList propsNode = docElement.getElementsByTagName("x:properties");
+        if (propsNode.getLength() == 1)
+        {
+            Element propsElement = (Element) propsNode.item(0);
+            NodeList propertyNodes = propsElement.getElementsByTagName("x:property");
+            for (int i = 0; i < propertyNodes.getLength(); i++)
+            {
+                Element propertyElement = (Element) propertyNodes.item(i);
+                // TODO proper error handling needed below in case the XML is not correct and item 0 does not exist
+                String code = propertyElement.getElementsByTagName("x:code").item(0).getTextContent();
+                String val = propertyElement.getElementsByTagName("x:value").item(0).getTextContent();
+                dsProperties.add(new NewProperty(code, val));
+            }
+        }
+        return dsProperties;
+    }
+
+    /** Reads x:properties/x:property pairs as generic entity properties (may be empty). */
+    private EntityProperty[] parseProperties(XPath xpath, Node xdNode)
+    {
+
+        List<EntityProperty> entityProperties = new ArrayList<EntityProperty>();
+        Element docElement = (Element) xdNode;
+        NodeList propsNode = docElement.getElementsByTagName("x:properties");
+        if (propsNode.getLength() == 1)
+        {
+            Element propsElement = (Element) propsNode.item(0);
+            NodeList propertyNodes = propsElement.getElementsByTagName("x:property");
+            for (int i = 0; i < propertyNodes.getLength(); i++)
+            {
+                Element propertyElement = (Element) propertyNodes.item(i);
+                // TODO proper error handling needed below in case the XML is not correct and item 0 does not exist
+                String code = propertyElement.getElementsByTagName("x:code").item(0).getTextContent();
+                String val = propertyElement.getElementsByTagName("x:value").item(0).getTextContent();
+                EntityProperty property = createEntityProperty(code, val);
+                entityProperties.add(property);
+            }
+        }
+        return entityProperties.toArray(new EntityProperty[entityProperties.size()]);
+    }
+
+    /** Builds an EntityProperty whose PropertyType carries only the code. */
+    private EntityProperty createEntityProperty(String code, String val)
+    {
+        EntityProperty property = new EntityProperty();
+        PropertyType propertyType = new PropertyType();
+        propertyType.setCode(code);
+        property.setPropertyType(propertyType);
+        property.setValue(val);
+        return property;
+    }
+
+    /** Registers an experiment (keyed by perm id) together with its connections and properties. */
+    private void parseExperimentMetaData(XPath xpath, String permId, Node xdNode, Date lastModificationDate)
+    {
+        String code = extractCode(xdNode);
+        String type = extractType(xdNode);
+        String project = extractAttribute(xdNode, "project");
+        String space = extractSpace(xdNode);
+        NewExperiment newExp = new NewExperiment("/" + nameTranslator.translate(space) + "/" + project + "/" + code, type);
+        newExp.setPermID(permId);
+        ExperimentWithConnections newExpWithConns = data.new ExperimentWithConnections(newExp, lastModificationDate);
+        data.getExperimentsToProcess().put(permId, newExpWithConns);
+        newExpWithConns.setConnections(parseConnections(xpath, xdNode));
+        newExp.setProperties(parseProperties(xpath, xdNode));
+    }
+
+    /** Registers a sample (keyed by perm id) together with its connections and properties. */
+    private void parseSampleMetaData(XPath xpath, String permId, Node xdNode, Date lastModificationDate)
+    {
+        String code = extractCode(xdNode);
+        String type = extractType(xdNode);
+        String experiment = extractAttribute(xdNode, "experiment");
+        String space = extractSpace(xdNode);
+        SampleType sampleType = new SampleType();
+        sampleType.setCode(type);
+
+        NewSample newSample = new NewSample("/" + nameTranslator.translate(space) + "/" + code, sampleType, null, null,
+                experiment.trim().equals("") ? null : experiment, null, null, new IEntityProperty[0],
+                new ArrayList<NewAttachment>());
+        newSample.setPermID(permId);
+        SampleWithConnections newSampleWithConns = data.new SampleWithConnections(newSample, lastModificationDate);
+        data.getSamplesToProcess().put(permId, newSampleWithConns);
+        newSampleWithConns.setConnections(parseConnections(xpath, xdNode));
+        newSample.setProperties(parseProperties(xpath, xdNode));
+    }
+
+    private String extractType(Node xdNode)
+    {
+        return extractAttribute(xdNode, "type");
+    }
+
+    /** Side effect: records the translated space code in the harvester space list. */
+    private String extractSpace(Node xdNode)
+    {
+        String space = extractAttribute(xdNode, "space");
+        data.getHarvesterSpaceList().add(nameTranslator.translate(space));
+        return space;
+    }
+
+    // Matches the first run of 17 or more digits/hyphens in the URL — presumably the
+    // timestamp-based openBIS perm id; TODO confirm against the data source URL scheme.
+    private String extractPermIdFromURI(String uri) throws XPathExpressionException
+    {
+        Pattern pattern = Pattern.compile("([0-9\\-]{17,})");
+        Matcher matcher = pattern.matcher(uri);
+        if (matcher.find())
+        {
+            return matcher.group(1);
+        }
+        throw new XPathExpressionException("Malformed resource url");
+    }
+
+    /** Extracts the code between "DATA_SET/" and "/M" in the resource URL. */
+    private String extractDataSetCodeFromURI(String uri) throws XPathExpressionException
+    {
+        Pattern pattern = Pattern.compile("(?<=DATA_SET\\/)(.*)(?=\\/M)");
+        Matcher matcher = pattern.matcher(uri);
+        if (matcher.find())
+        {
+            return matcher.group(1);
+        }
+        throw new XPathExpressionException("Malformed resource url");
+    }
+
+    /** Returns the second-to-last path segment, i.e. the material code in ".../<code>/M". */
+    private String extractMaterialCodeFromURI(String uri) throws XPathExpressionException
+    {
+        // TODO malformed uri handling
+        String[] parts = uri.split("/");
+        return parts[parts.length - 2];
+    }
+}
+// expr = xpath.compile("//s:loc[normalize-space(.) ='" + uri + "']/../x:xd/x:properties/x:property"); //
+// "//*/text()[normalize-space(.)='" + loc_text + "']/parent::/rs:ln");// "/s:urlset/s:url[/s:loc/text() = '" +
+// for (int ji = 0; ji < url_node.getLength(); ji++)
+// {
+// System.out.print(url_node.item(ji).getNodeName() + ":" + url_node.item(ji).getAttributes().getNamedItem("kind"));
+// System.out.println();
+// }
+
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParserData.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParserData.java
new file mode 100644
index 00000000000..b73fc512d1e
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/ResourceListParserData.java
@@ -0,0 +1,356 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.DataSetKind;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewExperiment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewMaterialWithType;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewProject;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.NewSample;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewContainerDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.NewLinkDataSet;
+
+/**
+ * Holds the outcome of parsing a resource list document: the entities to be synchronized
+ * (projects/experiments/samples/data sets keyed by perm id, materials keyed by code), the
+ * connections between them, the spaces seen, and the timestamp of the resource list itself.
+ *
+ * @author Ganime Betul Akin
+ */
+public class ResourceListParserData
+{
+    // Introduced to store the timestamp of any still-running transaction on the data source at
+    // the time of retrieving the resource list.
+    private Date resourceListTimestamp;
+
+    /** Translated codes of all spaces encountered while parsing. */
+    private final Set<String> harvesterSpaceList = new HashSet<>();
+
+    private final Map<String, ProjectWithConnections> projectsToProcess = new HashMap<>();
+
+    private final Map<String, ExperimentWithConnections> experimentsToProcess = new HashMap<>();
+
+    private final Map<String, SampleWithConnections> samplesToProcess = new HashMap<>();
+
+    private final Map<String, DataSetWithConnections> dataSetsToProcess = new HashMap<>();
+
+    private final Map<String, MaterialWithLastModificationDate> materialsToProcess = new HashMap<>();
+
+    public Date getResourceListTimestamp()
+    {
+        return resourceListTimestamp;
+    }
+
+    public void setResourceListTimestamp(Date resourceListTimestamp)
+    {
+        this.resourceListTimestamp = resourceListTimestamp;
+    }
+
+    public Set<String> getHarvesterSpaceList()
+    {
+        return harvesterSpaceList;
+    }
+
+    public Map<String, ProjectWithConnections> getProjectsToProcess()
+    {
+        return projectsToProcess;
+    }
+
+    public Map<String, ExperimentWithConnections> getExperimentsToProcess()
+    {
+        return experimentsToProcess;
+    }
+
+    public Map<String, SampleWithConnections> getSamplesToProcess()
+    {
+        return samplesToProcess;
+    }
+
+    public Map<String, DataSetWithConnections> getDataSetsToProcess()
+    {
+        return dataSetsToProcess;
+    }
+
+    public Map<String, MaterialWithLastModificationDate> getMaterialsToProcess()
+    {
+        return materialsToProcess;
+    }
+
+    /**
+     * Returns the physical data sets that were modified after <code>lastSyncDate</code> or whose
+     * codes appear in <code>dataSetsCodesToRetry</code>, keyed by perm id.
+     */
+    public Map<String, DataSetWithConnections> filterPhysicalDataSetsByLastModificationDate(Date lastSyncDate, Set<String> dataSetsCodesToRetry)
+    {
+        Map<String, DataSetWithConnections> dsMap = new HashMap<>();
+        // Iterate the entry set to avoid a second lookup per key.
+        for (Map.Entry<String, DataSetWithConnections> entry : dataSetsToProcess.entrySet())
+        {
+            DataSetWithConnections ds = entry.getValue();
+            if (ds.getKind() == DataSetKind.PHYSICAL
+                    && (ds.getLastModificationDate().after(lastSyncDate)
+                            || dataSetsCodesToRetry.contains(ds.getDataSet().getCode())))
+            {
+                dsMap.put(entry.getKey(), ds);
+            }
+        }
+        return dsMap;
+    }
+
+    /** Returns the container data sets, keyed by perm id. */
+    public Map<String, DataSetWithConnections> filterContainerDataSets()
+    {
+        Map<String, DataSetWithConnections> dsMap = new HashMap<>();
+        for (Map.Entry<String, DataSetWithConnections> entry : dataSetsToProcess.entrySet())
+        {
+            DataSetWithConnections ds = entry.getValue();
+            if (ds.getKind() == DataSetKind.CONTAINER)
+            {
+                dsMap.put(entry.getKey(), ds);
+            }
+        }
+        return dsMap;
+    }
+
+    /**
+     * A project to be registered, with its last modification date on the data source and its
+     * connections to other entities. (Inner, non-static: instances are created by the parser via
+     * <code>data.new ProjectWithConnections(...)</code>.)
+     */
+    class ProjectWithConnections
+    {
+        private final NewProject project;
+
+        private final Date lastModificationDate;
+
+        private List<Connection> connections = new ArrayList<>();
+
+        ProjectWithConnections(NewProject project, Date lastModDate)
+        {
+            this.project = project;
+            this.lastModificationDate = lastModDate;
+        }
+
+        public NewProject getProject()
+        {
+            return project;
+        }
+
+        public Date getLastModificationDate()
+        {
+            return lastModificationDate;
+        }
+
+        public List<Connection> getConnections()
+        {
+            return connections;
+        }
+
+        void addConnection(Connection conn)
+        {
+            this.connections.add(conn);
+        }
+
+        /** Replaces the connection list wholesale (the list is not copied). */
+        public void setConnections(List<Connection> conns)
+        {
+            this.connections = conns;
+        }
+    }
+
+    /** An experiment to be registered, with its last modification date and connections. */
+    class ExperimentWithConnections
+    {
+        private final NewExperiment experiment;
+
+        private final Date lastModificationDate;
+
+        private List<Connection> connections = new ArrayList<>();
+
+        ExperimentWithConnections(NewExperiment exp, Date lastModDate)
+        {
+            this.experiment = exp;
+            this.lastModificationDate = lastModDate;
+        }
+
+        public NewExperiment getExperiment()
+        {
+            return experiment;
+        }
+
+        public Date getLastModificationDate()
+        {
+            return lastModificationDate;
+        }
+
+        public List<Connection> getConnections()
+        {
+            return connections;
+        }
+
+        /** Replaces the connection list wholesale (the list is not copied). */
+        public void setConnections(List<Connection> conns)
+        {
+            this.connections = conns;
+        }
+    }
+
+    /** A sample to be registered, with its last modification date and connections. */
+    class SampleWithConnections
+    {
+        private final NewSample sample;
+
+        private final Date lastModificationDate;
+
+        private List<Connection> connections = new ArrayList<>();
+
+        SampleWithConnections(NewSample sample, Date lastModDate)
+        {
+            this.sample = sample;
+            this.lastModificationDate = lastModDate;
+        }
+
+        public NewSample getSample()
+        {
+            return sample;
+        }
+
+        public Date getLastModificationDate()
+        {
+            return lastModificationDate;
+        }
+
+        public List<Connection> getConnections()
+        {
+            return connections;
+        }
+
+        /** Replaces the connection list wholesale (the list is not copied). */
+        public void setConnections(List<Connection> conns)
+        {
+            this.connections = conns;
+        }
+    }
+
+    /**
+     * A data set to be registered, with its last modification date and connections. The data set
+     * kind is derived from the concrete type of the wrapped {@link NewExternalData}.
+     * NOTE(review): serializing this non-static inner class also pulls in the enclosing
+     * ResourceListParserData instance; it stays non-static because the parser constructs
+     * instances through <code>data.new DataSetWithConnections(...)</code>.
+     */
+    class DataSetWithConnections implements Serializable
+    {
+        private static final long serialVersionUID = 1L;
+
+        private final NewExternalData dataSet;
+
+        private final Date lastModificationDate;
+
+        private List<Connection> connections = new ArrayList<>();
+
+        DataSetWithConnections(NewExternalData dataSet, Date lastModDate)
+        {
+            this.dataSet = dataSet;
+            this.lastModificationDate = lastModDate;
+        }
+
+        /** Derives the kind from the runtime type; anything not container/link is PHYSICAL. */
+        public DataSetKind getKind()
+        {
+            if (dataSet instanceof NewContainerDataSet)
+            {
+                return DataSetKind.CONTAINER;
+            }
+            if (dataSet instanceof NewLinkDataSet)
+            {
+                return DataSetKind.LINK;
+            }
+            return DataSetKind.PHYSICAL;
+        }
+
+        public NewExternalData getDataSet()
+        {
+            return dataSet;
+        }
+
+        public Date getLastModificationDate()
+        {
+            return lastModificationDate;
+        }
+
+        public List<Connection> getConnections()
+        {
+            return connections;
+        }
+
+        /** Replaces the connection list wholesale (the list is not copied). */
+        public void setConnections(List<Connection> conns)
+        {
+            this.connections = conns;
+        }
+    }
+
+    /** A directed, typed link from the owning entity to the entity with perm id "toPermId". */
+    class Connection
+    {
+        final String toPermId;
+
+        final String connType;
+
+        Connection(String toPermId, String connType)
+        {
+            this.toPermId = toPermId;
+            this.connType = connType;
+        }
+
+        public String getType()
+        {
+            return connType;
+        }
+
+        public String getToPermId()
+        {
+            return toPermId;
+        }
+    }
+
+    /** Connection type labels as they appear in the resource list document. */
+    enum ConnectionType
+    {
+        SIMPLE_CONNECTION("Connection"),
+        PARENT_CHILD_RELATIONSHIP("Child"),
+        CONTAINER_COMPONENT_RELATIONSHIP("Component");
+
+        private final String type;
+
+        private ConnectionType(String type)
+        {
+            this.type = type;
+        }
+
+        public String getType()
+        {
+            return type;
+        }
+    }
+
+    /** A material to be registered, with its last modification date on the data source. */
+    class MaterialWithLastModificationDate
+    {
+        private final NewMaterialWithType material;
+
+        private final Date lastModificationDate;
+
+        MaterialWithLastModificationDate(NewMaterialWithType material, Date lastModDate)
+        {
+            this.material = material;
+            this.lastModificationDate = lastModDate;
+        }
+
+        public NewMaterialWithType getMaterial()
+        {
+            return material;
+        }
+
+        public Date getLastModificationDate()
+        {
+            return lastModificationDate;
+        }
+    }
+}
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/DataSourceConnector.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/DataSourceConnector.java
new file mode 100644
index 00000000000..41229a7b258
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/DataSourceConnector.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.datasourceconnector;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
+import org.eclipse.jetty.client.HttpClient;
+import org.eclipse.jetty.client.api.AuthenticationStore;
+import org.eclipse.jetty.client.api.ContentResponse;
+import org.eclipse.jetty.client.api.Request;
+import org.eclipse.jetty.client.util.BasicAuthentication;
+import org.eclipse.jetty.http.HttpStatus;
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+import ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.config.BasicAuthCredentials;
+import ch.systemsx.cisd.common.http.JettyHttpClientFactory;
+
+/**
+ * Fetches the resource list from a remote data source over HTTP with basic
+ * authentication and parses the response body into a DOM {@link Document}.
+ *
+ * @author Ganime Betul Akin
+ */
+public class DataSourceConnector implements IDataSourceConnector
+{
+    private final String dataSourceUrl;
+
+    private final BasicAuthCredentials authCredentials;
+
+    public DataSourceConnector(String url, BasicAuthCredentials authCredentials)
+    {
+        this.dataSourceUrl = url;
+        this.authCredentials = authCredentials;
+    }
+
+    @Override
+    public Document getResourceListAsXMLDoc(List<String> spaceBlackList) throws Exception
+    {
+        HttpClient client = JettyHttpClientFactory.getHttpClient();
+        addAuthenticationCredentials(client);
+        Request request = createNewHttpRequest(client, spaceBlackList);
+        ContentResponse contentResponse = getResponse(request);
+        return parseResponse(contentResponse);
+    }
+
+    private Document parseResponse(ContentResponse contentResponse) throws ParserConfigurationException, SAXException, IOException
+    {
+        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
+        domFactory.setNamespaceAware(true);
+        // Harden against XXE: the XML comes from a remote server, so forbid
+        // DOCTYPE declarations and external entity expansion altogether.
+        domFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
+        domFactory.setXIncludeAware(false);
+        domFactory.setExpandEntityReferences(false);
+        DocumentBuilder builder = domFactory.newDocumentBuilder();
+        return builder.parse(new ByteArrayInputStream(contentResponse.getContent()));
+    }
+
+    private ContentResponse getResponse(Request request) throws InterruptedException, TimeoutException, ExecutionException, IOException
+    {
+        ContentResponse contentResponse = request.send();
+        if (contentResponse.getStatus() != HttpStatus.Code.OK.getCode())
+        {
+            throw new IOException("Resource List could not be retrieved: " + contentResponse.getContentAsString());
+        }
+        return contentResponse;
+    }
+
+    private Request createNewHttpRequest(HttpClient client, List<String> spaceBlackList)
+    {
+        // Spaces to be excluded are sent as one comma-separated query parameter.
+        StringBuilder blackList = new StringBuilder();
+        for (String dataSourceSpace : spaceBlackList)
+        {
+            if (blackList.length() > 0)
+            {
+                blackList.append(',');
+            }
+            blackList.append(dataSourceSpace);
+        }
+        String req = dataSourceUrl + "?verb=resourcelist.xml";
+        if (blackList.length() > 0)
+        {
+            req += "&black_list=" + blackList;
+        }
+        return client.newRequest(req).method("GET");
+    }
+
+    private void addAuthenticationCredentials(HttpClient client) throws URISyntaxException
+    {
+        AuthenticationStore auth = client.getAuthenticationStore();
+        auth.addAuthentication(new BasicAuthentication(new URI(dataSourceUrl), authCredentials.getRealm(), authCredentials.getUser(), authCredentials
+                .getPassword()));
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/IDataSourceConnector.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/IDataSourceConnector.java
new file mode 100644
index 00000000000..f5a40af0744
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/datasourceconnector/IDataSourceConnector.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.datasourceconnector;
+
+import java.util.List;
+
+import org.w3c.dom.Document;
+
+/**
+ * Retrieves the resource list of a data source as an XML document.
+ *
+ * @author Ganime Betul Akin
+ */
+public interface IDataSourceConnector
+{
+    /**
+     * Retrieves and parses the resource list.
+     *
+     * @param spaceBlackList spaces to be excluded from the resource list
+     * @return the resource list as a DOM document
+     * @throws Exception if the list cannot be retrieved or parsed
+     */
+    Document getResourceListAsXMLDoc(List<String> spaceBlackList) throws Exception;
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/CustomNameTranslator.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/CustomNameTranslator.java
new file mode 100644
index 00000000000..9cf1ad09281
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/CustomNameTranslator.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+
+/**
+ * A translator backed by an explicit name-to-name mapping (the field name suggests
+ * data-source space name -> harvester space name). Translation fails with a
+ * {@link ConfigurationFailureException} for names that have no mapping.
+ *
+ * @author Ganime Betul Akin
+ */
+public class CustomNameTranslator implements INameTranslator
+{
+    // Program to the interface: any Map implementation works; callers passing a
+    // HashMap (the previous parameter type) remain source-compatible.
+    private final Map<String, String> spaceMappings;
+
+    /**
+     * @param spaceMappings maps each source name to its translated name; may be null,
+     *            in which case every call to {@link #translate(String)} fails.
+     */
+    public CustomNameTranslator(Map<String, String> spaceMappings)
+    {
+        this.spaceMappings = spaceMappings;
+    }
+
+    @Override
+    public String translate(String name)
+    {
+        // Null check kept here (not in the constructor) to preserve the original
+        // fail-on-use behavior for translators constructed without mappings.
+        if (spaceMappings == null)
+        {
+            throw new ConfigurationFailureException("Space mappings cannot be null");
+        }
+        String newName = spaceMappings.get(name);
+        if (newName == null)
+        {
+            throw new ConfigurationFailureException("No corresponding mapping found for '" + name + "'");
+        }
+        return newName;
+    }
+
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/DefaultNameTranslator.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/DefaultNameTranslator.java
new file mode 100644
index 00000000000..8c7916ac30b
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/DefaultNameTranslator.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator;
+
+/**
+ * The identity translator: every name is returned unchanged.
+ *
+ * @author Ganime Betul Akin
+ */
+public class DefaultNameTranslator implements INameTranslator
+{
+    @Override
+    public String translate(String name)
+    {
+        return name;
+    }
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/INameTranslator.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/INameTranslator.java
new file mode 100644
index 00000000000..4ef2b498835
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/INameTranslator.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator;
+/**
+ * Translates a name coming from a data source into the name used on the receiving
+ * side (implementations in this package map, prefix, or return names unchanged).
+ *
+ * @author Ganime Betul Akin
+ */
+public interface INameTranslator
+{
+    /** Returns the translated form of {@code name}. */
+    String translate(String name);
+
+}
diff --git a/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/PrefixBasedNameTranslator.java b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/PrefixBasedNameTranslator.java
new file mode 100644
index 00000000000..7c18825fc9b
--- /dev/null
+++ b/datastore_server/source/java/ch/ethz/sis/openbis/generic/server/dss/plugins/sync/harvester/synchronizer/translator/PrefixBasedNameTranslator.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2016 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.ethz.sis.openbis.generic.server.dss.plugins.sync.harvester.synchronizer.translator;
+
+/**
+ * Translates a name by prepending a fixed prefix joined with an underscore,
+ * e.g. with prefix {@code "DS1"} the name {@code "SPACE"} becomes {@code "DS1_SPACE"}.
+ *
+ * @author Ganime Betul Akin
+ */
+public class PrefixBasedNameTranslator implements INameTranslator
+{
+    private final String prefix;
+
+    public PrefixBasedNameTranslator(String prefix)
+    {
+        this.prefix = prefix;
+    }
+
+    @Override
+    public String translate(String name)
+    {
+        String suffix = "_" + name;
+        return prefix + suffix;
+    }
+}
-- 
GitLab