diff --git a/datastore_server/source/core-plugins/multi-dataset-archiver/1/dss/data-sources/multi-dataset-archiver-db/plugin.properties b/datastore_server/source/core-plugins/multi-dataset-archiver/1/dss/data-sources/multi-dataset-archiver-db/plugin.properties
new file mode 100644
index 0000000000000000000000000000000000000000..a9c89d3dd437ff85105e5a93425205b86294d97b
--- /dev/null
+++ b/datastore_server/source/core-plugins/multi-dataset-archiver/1/dss/data-sources/multi-dataset-archiver-db/plugin.properties
@@ -0,0 +1,8 @@
+version-holder-class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDatasetArchiverDBVersionHolder
+databaseEngineCode = postgresql
+basicDatabaseName = multi_dataset_archive
+urlHostPart = ${multi-dataset-archive-database.url-host-part:localhost}
+databaseKind = ${multi-dataset-archive-database.kind:prod}
+scriptFolder = ${multi-dataset-archive-sql-root-folder:}
+owner = ${multi-dataset-archive-database.owner:}
+password = ${multi-dataset-archive-database.password:}
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
index 6924297f5e2b34a9f9146b575ed6ac8caa81d173..085532b1eb5b540ee1366d01b53c9e076ccfe6dd 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
@@ -22,6 +22,7 @@ import static ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataSetArchiving
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -104,7 +105,7 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
     /**
      * Total size in bytes of data sets processed in a single batch of archiver.
      */
-    private final int maximumBatchSizeInBytes;
+    private final long maximumBatchSizeInBytes;
 
     public AbstractArchiverProcessingPlugin(Properties properties, File storeRoot,
             IStatusChecker archivePrerequisiteOrNull, IStatusChecker unarchivePrerequisiteOrNull)
@@ -113,7 +114,7 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
         this.archivePrerequisiteOrNull = archivePrerequisiteOrNull;
         this.unarchivePrerequisiteOrNull = unarchivePrerequisiteOrNull;
         this.synchronizeArchive = PropertyUtils.getBoolean(properties, SYNCHRONIZE_ARCHIVE, true);
-        this.maximumBatchSizeInBytes = PropertyUtils.getInt(properties, BATCH_SIZE_IN_BYTES, 1024 * 1024 * 1024);
+        this.maximumBatchSizeInBytes = PropertyUtils.getLong(properties, BATCH_SIZE_IN_BYTES, 1024L * 1024 * 1024);
         this.tempFolder = PropertyUtils.getDirectory(properties, TEMP_FOLDER, null);
     }
 
@@ -167,7 +168,7 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
             return createStatuses(errorStatus, datasets, Operation.ARCHIVE).getProcessingStatus();
         }
 
-        for (List<DatasetDescription> datasetGroup : splitIntoGroups(datasets, maximumBatchSizeInBytes))
+        for (List<DatasetDescription> datasetGroup : splitIntoGroups(datasets))
         {
             DatasetProcessingStatuses statuses = archiveSingleBatch(context, removeFromDataStore, finalstatuses, datasetGroup);
             finalstatuses.addResults(statuses);
@@ -176,7 +177,7 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
         return finalstatuses.getProcessingStatus();
     }
 
-    private List<List<DatasetDescription>> splitIntoGroups(List<DatasetDescription> datasets, long minGroupSize)
+    protected List<List<DatasetDescription>> splitIntoGroups(List<DatasetDescription> datasets)
     {
         List<List<DatasetDescription>> results = new LinkedList<List<DatasetDescription>>();
 
@@ -185,13 +186,21 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
         long runningSum = 0;
         for (DatasetDescription dataset : datasets)
         {
-            currentResult.add(dataset);
-            runningSum += dataset.getDataSetSize();
-            if (runningSum > minGroupSize)
+            if (dataset.getDataSetSize() > maximumBatchSizeInBytes)
             {
-                results.add(currentResult);
-                runningSum = 0;
-                currentResult = new LinkedList<DatasetDescription>();
+                results.add(Collections.singletonList(dataset));
+            }
+            else
+            {
+                currentResult.add(dataset);
+                runningSum += dataset.getDataSetSize();
+
+                if (runningSum > maximumBatchSizeInBytes)
+                {
+                    results.add(currentResult);
+                    runningSum = 0;
+                    currentResult = new LinkedList<DatasetDescription>();
+                }
             }
         }
         if (false == currentResult.isEmpty())
@@ -734,4 +743,9 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
 
     }
 
+    public long getMaximumBatchSizeInBytes()
+    {
+        return maximumBatchSizeInBytes;
+    }
+
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DataSetFileOperationsManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DataSetFileOperationsManager.java
index b7dc25926fe3890270e3472d672dc964b6e65077..9eec61cdaf7917064d8dcab5f95789752ad42a00 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DataSetFileOperationsManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DataSetFileOperationsManager.java
@@ -31,6 +31,8 @@ import ch.systemsx.cisd.common.properties.PropertyUtils;
 import ch.systemsx.cisd.openbis.common.io.hierarchical_content.DefaultFileBasedHierarchicalContentFactory;
 import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContent;
 import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.AbstractDataSetFileOperationsManager;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.ArchiveDestination;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.ArchiveDestinationFactory;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IDatasetLocation;
 import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
 
@@ -51,7 +53,7 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     private final static Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION,
             DataSetFileOperationsManager.class);
 
-    private final ArchiveDestination archiveDestinationManager;
+    private final ArchiveDestination archiveDestination;
 
     public DataSetFileOperationsManager(Properties properties,
             IPathCopierFactory pathCopierFactory,
@@ -63,8 +65,9 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
 
         String hostFile = PropertyUtils.getMandatoryProperty(properties, DESTINATION_KEY);
 
-        this.archiveDestinationManager =
-                createArchiveDestinationManager(properties, pathCopierFactory, sshCommandExecutorFactory, hostFile, timeoutInMillis);
+        ArchiveDestinationFactory factory =
+                new ArchiveDestinationFactory(properties, pathCopierFactory, sshCommandExecutorFactory, hostFile, timeoutInMillis);
+        this.archiveDestination = factory.createArchiveDestination();
     }
 
     /**
@@ -76,18 +79,18 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     {
         try
         {
-            File destinationFolder = new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation());
+            File destinationFolder = new File(archiveDestination.getDestination(), dataset.getDataSetLocation());
             if (createFolderIfNotExists(destinationFolder.getParentFile())
                     || destinationExists(destinationFolder).isSuccess() == false)
             {
                 operationLog.info("Copy dataset '" + dataset.getDataSetCode() + "' from '"
                         + originalData.getPath() + "' to '" + destinationFolder.getParentFile());
-                archiveDestinationManager.getExecutor().copyDataSetToDestination(originalData, destinationFolder.getParentFile());
+                archiveDestination.getExecutor().copyDataSetToDestination(originalData, destinationFolder.getParentFile());
             } else
             {
                 operationLog.info("Update dataset '" + dataset.getDataSetCode() + "' from '"
                         + originalData.getPath() + "' to '" + destinationFolder.getParentFile());
-                archiveDestinationManager.getExecutor().syncDataSetWithDestination(originalData, destinationFolder.getParentFile());
+                archiveDestination.getExecutor().syncDataSetWithDestination(originalData, destinationFolder.getParentFile());
             }
 
             return Status.OK;
@@ -106,13 +109,13 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     {
         try
         {
-            File destinationFolder = new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation());
+            File destinationFolder = new File(archiveDestination.getDestination(), dataset.getDataSetLocation());
             checkDestinationExists(destinationFolder);
             File folder = originalData.getParentFile();
             operationLog.info("Retrieve data set '" + dataset.getDataSetCode() + "' from '"
                     + destinationFolder.getPath() + "' to '" + folder);
             folder.mkdirs();
-            archiveDestinationManager.getExecutor().retrieveDataSetFromDestination(folder, destinationFolder);
+            archiveDestination.getExecutor().retrieveDataSetFromDestination(folder, destinationFolder);
             return Status.OK;
         } catch (ExceptionWithStatus ex)
         {
@@ -138,7 +141,7 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
                 @Override
                 public void delete(File dataSetFolder, String dataSetCode)
                 {
-                    archiveDestinationManager.getExecutor().deleteFolder(dataSetFolder);
+                    archiveDestination.getExecutor().deleteFolder(dataSetFolder);
                 }
             });
     }
@@ -158,10 +161,10 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
                 public void delete(File dataSetFolder, String dataSetCode)
                 {
                     File deletedFolder =
-                            new File(archiveDestinationManager.getDestination(), FOLDER_OF_AS_DELETED_MARKED_DATA_SETS);
-                    archiveDestinationManager.getExecutor().createFolder(deletedFolder);
+                            new File(archiveDestination.getDestination(), FOLDER_OF_AS_DELETED_MARKED_DATA_SETS);
+                    archiveDestination.getExecutor().createFolder(deletedFolder);
                     File markerFile = new File(deletedFolder, dataSetCode);
-                    archiveDestinationManager.getExecutor().createMarkerFile(markerFile);
+                    archiveDestination.getExecutor().createMarkerFile(markerFile);
                 }
             });
     }
@@ -170,7 +173,7 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     {
         try
         {
-            File destinationFolder = new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation());
+            File destinationFolder = new File(archiveDestination.getDestination(), dataset.getDataSetLocation());
             BooleanStatus destinationExists = destinationExists(destinationFolder);
             if (destinationExists.isSuccess())
             {
@@ -197,8 +200,8 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     {
         try
         {
-            File destinationFolder = new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation());
-            BooleanStatus resultStatus = archiveDestinationManager.getExecutor().checkSame(originalData, destinationFolder);
+            File destinationFolder = new File(archiveDestination.getDestination(), dataset.getDataSetLocation());
+            BooleanStatus resultStatus = archiveDestination.getExecutor().checkSame(originalData, destinationFolder);
             String message = resultStatus.tryGetMessage();
             if (message != null) // if there is a message something went wrong
             {
@@ -220,8 +223,8 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     {
         try
         {
-            File destinationFolder = new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation());
-            BooleanStatus resultStatus = archiveDestinationManager.getExecutor().exists(destinationFolder);
+            File destinationFolder = new File(archiveDestination.getDestination(), dataset.getDataSetLocation());
+            BooleanStatus resultStatus = archiveDestination.getExecutor().exists(destinationFolder);
             String message = resultStatus.tryGetMessage();
             if (message != null) // if there is a message something went wrong
             {
@@ -249,7 +252,7 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
         BooleanStatus destinationExists = destinationExists(destinationFolder);
         if (destinationExists.isSuccess() == false)
         {
-            archiveDestinationManager.getExecutor().createFolder(destinationFolder);
+            archiveDestination.getExecutor().createFolder(destinationFolder);
             return true;
         }
         return false;
@@ -257,7 +260,7 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
 
     private BooleanStatus destinationExists(File destinationFolder)
     {
-        BooleanStatus destinationExists = archiveDestinationManager.getExecutor().exists(destinationFolder);
+        BooleanStatus destinationExists = archiveDestination.getExecutor().exists(destinationFolder);
         if (destinationExists.isError())
         {
             operationLog.error("Could not check existence of '" + destinationFolder + "': "
@@ -270,14 +273,14 @@ public class DataSetFileOperationsManager extends AbstractDataSetFileOperationsM
     @Override
     public boolean isHosted()
     {
-        return archiveDestinationManager.isHosted();
+        return archiveDestination.isHosted();
     }
 
     @Override
     public IHierarchicalContent getAsHierarchicalContent(DatasetDescription dataset)
     {
         return new DefaultFileBasedHierarchicalContentFactory()
-                .asHierarchicalContent(new File(archiveDestinationManager.getDestination(), dataset.getDataSetLocation()), null);
+                .asHierarchicalContent(new File(archiveDestination.getDestination(), dataset.getDataSetLocation()), null);
     }
 
 }
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DistributedPackagingDataSetFileOperationsManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DistributedPackagingDataSetFileOperationsManager.java
index df911cbd46aaa7a28b75ac22a0b7ac21e98e942b..ed8cac139408cabd3d2efc9a8f686052edbdc302 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DistributedPackagingDataSetFileOperationsManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/DistributedPackagingDataSetFileOperationsManager.java
@@ -19,7 +19,6 @@ package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard;
 import java.io.File;
 import java.io.IOException;
 import java.util.Collection;
-import java.util.List;
 import java.util.Properties;
 
 import org.apache.log4j.Logger;
@@ -37,24 +36,20 @@ import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchical
 import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContentNode;
 import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.VerificationError;
 import ch.systemsx.cisd.openbis.dss.generic.server.AbstractDataSetPackager;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.AbstractDataSetFileOperationsManager;
 import ch.systemsx.cisd.openbis.dss.generic.shared.ArchiveFolders;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IDataSetDirectoryProvider;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IShareIdManager;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IdentifierAttributeMappingManager;
-import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.ContainerDataSet;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IDatasetLocation;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
 import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
-import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
-import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
 
 /**
  * @author Franz-Josef Elmer
  */
-public class DistributedPackagingDataSetFileOperationsManager implements IDataSetFileOperationsManager
+public class DistributedPackagingDataSetFileOperationsManager extends AbstractDataSetFileOperationsManager implements IDataSetFileOperationsManager
 {
     static final String MAPPING_FILE_KEY = "mapping-file";
 
@@ -95,13 +90,9 @@ public class DistributedPackagingDataSetFileOperationsManager implements IDataSe
 
     private final boolean createArchives;
 
-    private transient IEncapsulatedOpenBISService service;
-
-    private transient IDataSetDirectoryProvider directoryProvider;
-
     private transient IdentifierAttributeMappingManager archiveFolderMapping;
 
-    private IPackageManager packageManager;
+    protected IPackageManager packageManager;
 
     public DistributedPackagingDataSetFileOperationsManager(Properties properties, IPackageManager packageManager)
     {
@@ -333,41 +324,6 @@ public class DistributedPackagingDataSetFileOperationsManager implements IDataSe
         return new FilteredHierarchicalContent(packageManager.asHierarchialContent(getArchiveFile(dataset)), FILTER);
     }
 
-    private AbstractExternalData getDataSetWithAllMetaData(DatasetDescription datasetDescription)
-    {
-        AbstractExternalData dataSet = getService().tryGetDataSet(datasetDescription.getDataSetCode());
-        String experimentIdentifier = datasetDescription.getExperimentIdentifier();
-        dataSet.setExperiment(getService().tryGetExperiment(ExperimentIdentifierFactory.parse(experimentIdentifier)));
-        String sampleIdentifier = datasetDescription.getSampleIdentifier();
-        if (sampleIdentifier != null)
-        {
-            dataSet.setSample(getService().tryGetSampleWithExperiment(SampleIdentifierFactory.parse(sampleIdentifier)));
-        }
-        List<ContainerDataSet> containerDataSets = dataSet.getContainerDataSets();
-        if (containerDataSets != null)
-        {
-            for (ContainerDataSet containerDataSet : containerDataSets)
-            {
-                // Inject container properties
-                if (containerDataSet.getProperties() == null)
-                {
-                    containerDataSet.setDataSetProperties(getService().tryGetDataSet(containerDataSet.getCode()).getProperties());
-                }
-                // Inject full container experiment with properties
-                String containerExperimentIdentifier = containerDataSet.getExperiment().getIdentifier();
-                containerDataSet.setExperiment(getService().tryGetExperiment(ExperimentIdentifierFactory.parse(containerExperimentIdentifier)));
-                // Inject full container sample with properties
-                Sample sample = containerDataSet.getSample();
-                if (sample != null)
-                {
-                    String containerSampleIdentifier = sample.getIdentifier();
-                    containerDataSet.setSample(getService().tryGetSampleWithExperiment(SampleIdentifierFactory.parse(containerSampleIdentifier)));
-                }
-            }
-        }
-        return dataSet;
-    }
-
     private File getArchiveFile(DatasetDescription datasetDescription)
     {
         File folder = getArchiveFolderMapping().getArchiveFolder(datasetDescription, null);
@@ -411,24 +367,6 @@ public class DistributedPackagingDataSetFileOperationsManager implements IDataSe
         return folder;
     }
 
-    private IEncapsulatedOpenBISService getService()
-    {
-        if (service == null)
-        {
-            service = ServiceProvider.getOpenBISService();
-        }
-        return service;
-    }
-
-    private IDataSetDirectoryProvider getDirectoryProvider()
-    {
-        if (directoryProvider == null)
-        {
-            directoryProvider = ServiceProvider.getDataStoreService().getDataSetDirectoryProvider();
-        }
-        return directoryProvider;
-    }
-
     private IdentifierAttributeMappingManager getArchiveFolderMapping()
     {
         if (archiveFolderMapping == null)
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/IPackageManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/IPackageManager.java
index 3563314d54c4c2d54104e51a6c5d31dc82afc1d4..9764447240e8bc96bdab9e9d1a88453885f84cb5 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/IPackageManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/IPackageManager.java
@@ -30,11 +30,12 @@ import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IDatasetLocation;
  */
 public interface IPackageManager
 {
-
     String getName(IDatasetLocation dataSetLocation);
 
     void create(File packageFile, AbstractExternalData dataSet);
 
+    void create(File packageFile, List<AbstractExternalData> dataSets);
+
     List<VerificationError> verify(File packageFile);
 
     Status extract(File packageFile, File toDirectory);
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiver.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiver.java
index d975a911b000a12e28bb7c5fb5af398ab422abbd..82da6056cc3bec2a918c3443849ca9f669de3b4e 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiver.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiver.java
@@ -245,7 +245,7 @@ public class RsyncArchiver extends AbstractArchiverProcessingPlugin
                     IHierarchicalContentNode root = content.getRootNode();
                     IHierarchicalContentNode archivedRoot = archivedContent.getRootNode();
 
-                    status = checkHierarchySizeAndChecksums(root, archivedRoot, checksumVerificationCondition);
+                    status = checkHierarchySizeAndChecksums(root, "", archivedRoot, checksumVerificationCondition);
                 } catch (Throwable t)
                 {
                     status = Status.createError("Sanity check for data set " + dataSetCode + " failed: " + t);
@@ -268,11 +268,26 @@ public class RsyncArchiver extends AbstractArchiverProcessingPlugin
         return statuses;
     }
 
-    @Private
-    static Status checkHierarchySizeAndChecksums(IHierarchicalContentNode node,
+    private static String pathCombine(String part1, String part2)
+    {
+        if (part1.equals(""))
+        {
+            return part2;
+        }
+        if (part2.equals(""))
+        {
+            return part1;
+        }
+        return part1 + File.separator + part2;
+    }
+
+    public static Status checkHierarchySizeAndChecksums(
+            IHierarchicalContentNode node,
+            String originalNodeContext,
             IHierarchicalContentNode retrievedNode, ChecksumVerificationCondition checksumVerificationCondition)
     {
-        String relativePath = node.getRelativePath();
+
+        String relativePath = pathCombine(originalNodeContext, node.getRelativePath());
         String relativePathOfRetrieved = retrievedNode.getRelativePath();
         if (relativePath.equals(relativePathOfRetrieved) == false)
         {
@@ -303,7 +318,7 @@ public class RsyncArchiver extends AbstractArchiverProcessingPlugin
             for (int i = 0; i < size; i++)
             {
                 Status status =
-                        checkHierarchySizeAndChecksums(childNodes.get(i),
+                        checkHierarchySizeAndChecksums(childNodes.get(i), originalNodeContext,
                                 childNodesOfRetrieved.get(i), checksumVerificationCondition);
                 if (status.isError())
                 {
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarPackageManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarPackageManager.java
index e31d9c86c08a1e6f52b03826a597bb9f5f4ab486..cd79497ac7842fd9d36120b9763b7bcc533334c2 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarPackageManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarPackageManager.java
@@ -83,6 +83,30 @@ public class TarPackageManager implements IPackageManager
         }
     }
 
+    @Override
+    public void create(File packageFile, List<AbstractExternalData> dataSets)
+    {
+        TarDataSetPackager packager = null;
+
+        try
+        {
+            DataSetExistenceChecker existenceChecker =
+                    new DataSetExistenceChecker(getDirectoryProvider(), TimingParameters.create(new Properties()));
+            packager = new TarDataSetPackager(packageFile, getContentProvider(), existenceChecker);
+
+            for (AbstractExternalData dataSet : dataSets)
+            {
+                packager.addDataSetTo(dataSet.getCode() + "/", dataSet);
+            }
+        } finally
+        {
+            if (packager != null)
+            {
+                packager.close();
+            }
+        }
+    }
+
     @Override
     public List<VerificationError> verify(File packageFile)
     {
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipPackageManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipPackageManager.java
index e166e8277b12f50cf31f25d0ceded0519120542f..60de60467e7438ba4e9c799e86de2e1eebbd1d83 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipPackageManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipPackageManager.java
@@ -95,6 +95,29 @@ public class ZipPackageManager implements IPackageManager
         }
     }
 
+    @Override
+    public void create(File packageFile, List<AbstractExternalData> dataSets)
+    {
+        ZipDataSetPackager packager = null;
+
+        try
+        {
+            DataSetExistenceChecker existenceChecker =
+                    new DataSetExistenceChecker(getDirectoryProvider(), TimingParameters.create(new Properties()));
+            packager = new ZipDataSetPackager(packageFile, compress, getContentProvider(), existenceChecker);
+            for (AbstractExternalData dataSet : dataSets)
+            {
+                packager.addDataSetTo(dataSet.getCode(), dataSet);
+            }
+        } finally
+        {
+            if (packager != null)
+            {
+                packager.close();
+            }
+        }
+    }
+
     @Override
     public List<VerificationError> verify(File packageFile)
     {
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/AbstractDataSetFileOperationsManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/AbstractDataSetFileOperationsManager.java
index d83ebf646f1311450890e7ed3466affb172e7e5e..5d6187ea24c7b0aa5f1dd5a65fa45eabcfe7a870 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/AbstractDataSetFileOperationsManager.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/AbstractDataSetFileOperationsManager.java
@@ -16,22 +16,18 @@
 
 package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
 
-import java.io.File;
-import java.util.Properties;
+import java.util.List;
 
 import ch.rinn.restrictions.Private;
-import ch.systemsx.cisd.common.filesystem.FileOperations;
-import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.filesystem.HostAwareFile;
-import ch.systemsx.cisd.common.filesystem.IPathCopier;
-import ch.systemsx.cisd.common.filesystem.highwatermark.HostAwareFileWithHighwaterMark;
-import ch.systemsx.cisd.common.filesystem.ssh.ISshCommandExecutor;
-import ch.systemsx.cisd.openbis.dss.generic.server.IDataSetFileOperationsExecutor;
-import ch.systemsx.cisd.openbis.dss.generic.server.LocalDataSetFileOperationsExcecutor;
-import ch.systemsx.cisd.openbis.dss.generic.server.RemoteDataSetFileOperationsExecutor;
-import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.Copier;
-import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.IPathCopierFactory;
-import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.ISshCommandExecutorFactory;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataSetDirectoryProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.ContainerDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
 
 /**
  * @author Jakub Straszewski
@@ -40,134 +36,89 @@ public abstract class AbstractDataSetFileOperationsManager
 {
 
     @Private
-    protected static final String DESTINATION_KEY = "destination";
+    public static final String DESTINATION_KEY = "destination";
 
     @Private
-    protected static final String TIMEOUT_KEY = "timeout";
+    public static final String TIMEOUT_KEY = "timeout";
 
     @Private
-    protected static final String RSYNC_PASSWORD_FILE_KEY = "rsync-password-file";
+    public static final String RSYNC_PASSWORD_FILE_KEY = "rsync-password-file";
 
     @Private
     protected static final String CHECK_EXISTENCE_FAILED = "couldn't check existence";
 
     @Private
-    protected static final String DESTINATION_DOES_NOT_EXIST = "destination doesn't exist";
+    public static final String DESTINATION_DOES_NOT_EXIST = "destination doesn't exist";
 
     @Private
-    protected static final String RSYNC_EXEC = "rsync";
+    public static final String RSYNC_EXEC = "rsync";
 
     @Private
-    protected static final String SSH_EXEC = "ssh";
+    public static final String SSH_EXEC = "ssh";
 
     @Private
-    protected static final String GFIND_EXEC = "find";
+    public static final String GFIND_EXEC = "find";
 
     @Private
-    protected static final long DEFAULT_TIMEOUT_SECONDS = 15;
+    public static final long DEFAULT_TIMEOUT_SECONDS = 15;
 
     @Private
-    protected static final String FOLDER_OF_AS_DELETED_MARKED_DATA_SETS = "DELETED";
+    public static final String FOLDER_OF_AS_DELETED_MARKED_DATA_SETS = "DELETED";
 
-    public static class ArchiveDestination
-    {
-        private final String destination;
-
-        private final IDataSetFileOperationsExecutor executor;
+    protected transient IEncapsulatedOpenBISService service;
 
-        private final boolean isHosted;
+    protected transient IDataSetDirectoryProvider directoryProvider;
 
-        private final long timeoutInMillis;
-
-        private ArchiveDestination(String destination, IDataSetFileOperationsExecutor executor, boolean isHosted, long timeoutInMillis)
-        {
-            this.destination = destination;
-            this.executor = executor;
-            this.isHosted = isHosted;
-            this.timeoutInMillis = timeoutInMillis;
-        }
-
-        public String getDestination()
-        {
-            return destination;
-        }
-
-        public IDataSetFileOperationsExecutor getExecutor()
-        {
-            return executor;
-        }
-
-        public boolean isHosted()
+    protected IEncapsulatedOpenBISService getService()
+    {
+        if (service == null)
         {
-            return isHosted;
+            service = ServiceProvider.getOpenBISService();
         }
+        return service;
+    }
 
-        public long getTimeoutInMillis()
+    protected IDataSetDirectoryProvider getDirectoryProvider()
+    {
+        if (directoryProvider == null)
         {
-            return timeoutInMillis;
+            directoryProvider = ServiceProvider.getDataStoreService().getDataSetDirectoryProvider();
         }
-
+        return directoryProvider;
     }
 
-    protected static ArchiveDestination createArchiveDestinationManager(Properties properties,
-            IPathCopierFactory pathCopierFactory,
-            ISshCommandExecutorFactory sshCommandExecutorFactory, String archiveDestinationHost, long timeoutInMillis)
+    protected AbstractExternalData getDataSetWithAllMetaData(DatasetDescription datasetDescription)
     {
-        HostAwareFile hostAwareFile = HostAwareFileWithHighwaterMark.create(archiveDestinationHost, -1);
-        boolean isHosted = hostAwareFile.tryGetHost() != null;
-        String destination = hostAwareFile.getPath();
-        IDataSetFileOperationsExecutor executor;
-
-        if (false == isHosted)
+        AbstractExternalData dataSet = getService().tryGetDataSet(datasetDescription.getDataSetCode());
+        String experimentIdentifier = datasetDescription.getExperimentIdentifier();
+        dataSet.setExperiment(getService().tryGetExperiment(ExperimentIdentifierFactory.parse(experimentIdentifier)));
+        String sampleIdentifier = datasetDescription.getSampleIdentifier();
+        if (sampleIdentifier != null)
         {
-            executor = createLocalDataSetFileOperationsExecutor(properties, pathCopierFactory, hostAwareFile, timeoutInMillis);
-        } else
+            dataSet.setSample(getService().tryGetSampleWithExperiment(SampleIdentifierFactory.parse(sampleIdentifier)));
+        }
+        List<ContainerDataSet> containerDataSets = dataSet.getContainerDataSets();
+        if (containerDataSets != null)
         {
-            executor =
-                    createRemoteDataSetFileOperationsExecutor(properties, pathCopierFactory, sshCommandExecutorFactory, hostAwareFile,
-                            timeoutInMillis);
+            for (ContainerDataSet containerDataSet : containerDataSets)
+            {
+                // Inject container properties
+                if (containerDataSet.getProperties() == null)
+                {
+                    containerDataSet.setDataSetProperties(getService().tryGetDataSet(containerDataSet.getCode()).getProperties());
+                }
+                // Inject full container experiment with properties
+                String containerExperimentIdentifier = containerDataSet.getExperiment().getIdentifier();
+                containerDataSet.setExperiment(getService().tryGetExperiment(ExperimentIdentifierFactory.parse(containerExperimentIdentifier)));
+                // Inject full container sample with properties
+                Sample sample = containerDataSet.getSample();
+                if (sample != null)
+                {
+                    String containerSampleIdentifier = sample.getIdentifier();
+                    containerDataSet.setSample(getService().tryGetSampleWithExperiment(SampleIdentifierFactory.parse(containerSampleIdentifier)));
+                }
+            }
         }
-        return new ArchiveDestination(destination, executor, isHosted, timeoutInMillis);
-    }
-
-    protected static RemoteDataSetFileOperationsExecutor createRemoteDataSetFileOperationsExecutor(Properties properties,
-            IPathCopierFactory pathCopierFactory, ISshCommandExecutorFactory sshCommandExecutorFactory, HostAwareFile hostAwareFile,
-            long timeoutInMillis)
-    {
-        String hostOrNull = hostAwareFile.tryGetHost();
-        File sshExecutable = Copier.getExecutable(properties, SSH_EXEC);
-        File rsyncExecutable = Copier.getExecutable(properties, RSYNC_EXEC);
-        File gfindExecutable = Copier.getExecutable(properties, GFIND_EXEC);
-
-        IPathCopier copier =
-                pathCopierFactory.create(rsyncExecutable, sshExecutable, timeoutInMillis);
-        copier.check();
-        String rsyncModule = hostAwareFile.tryGetRsyncModule();
-        String rsyncPasswordFile = properties.getProperty(RSYNC_PASSWORD_FILE_KEY);
-        FileUtilities.checkPathCopier(copier, hostOrNull, null, rsyncModule, rsyncPasswordFile,
-                timeoutInMillis);
-        ISshCommandExecutor sshCommandExecutor = sshCommandExecutorFactory.create(sshExecutable, hostOrNull);
-        RemoteDataSetFileOperationsExecutor result = new RemoteDataSetFileOperationsExecutor(sshCommandExecutor, copier,
-                gfindExecutable, hostOrNull, rsyncModule, rsyncPasswordFile,
-                timeoutInMillis);
-        return result;
-    }
-
-    protected static LocalDataSetFileOperationsExcecutor createLocalDataSetFileOperationsExecutor(Properties properties,
-            IPathCopierFactory pathCopierFactory,
-            HostAwareFile hostAwareFile, long timeoutInMillis)
-    {
-        File sshExecutable = null; // don't use ssh locally
-        File rsyncExecutable = Copier.getExecutable(properties, RSYNC_EXEC);
-        IPathCopier copier =
-                pathCopierFactory.create(rsyncExecutable, sshExecutable, timeoutInMillis);
-        copier.check();
-        String rsyncModule = hostAwareFile.tryGetRsyncModule();
-        String rsyncPasswordFile = properties.getProperty(RSYNC_PASSWORD_FILE_KEY);
-        LocalDataSetFileOperationsExcecutor result = new LocalDataSetFileOperationsExcecutor(
-                FileOperations.getMonitoredInstanceForCurrentThread(), copier,
-                rsyncModule, rsyncPasswordFile);
-        return result;
+        return dataSet;
     }
-
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestination.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestination.java
new file mode 100644
index 0000000000000000000000000000000000000000..33ead6ff4181d1302b3e54a100f2af798102d6fe
--- /dev/null
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestination.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import ch.systemsx.cisd.openbis.dss.generic.server.IDataSetFileOperationsExecutor;
+
+/**
+ * @author Jakub Straszewski
+ */
+public class ArchiveDestination
+{
+    /**
+     * Path to the destination
+     */
+    private final String destination;
+
+    private final IDataSetFileOperationsExecutor executor;
+
+    private final boolean isHosted;
+
+    private final long timeoutInMillis;
+
+    public ArchiveDestination(String destination, IDataSetFileOperationsExecutor executor, boolean isHosted, long timeoutInMillis)
+    {
+        this.destination = destination;
+        this.executor = executor;
+        this.isHosted = isHosted;
+        this.timeoutInMillis = timeoutInMillis;
+    }
+
+    public String getDestination()
+    {
+        return destination;
+    }
+
+    public IDataSetFileOperationsExecutor getExecutor()
+    {
+        return executor;
+    }
+
+    public boolean isHosted()
+    {
+        return isHosted;
+    }
+
+    public long getTimeoutInMillis()
+    {
+        return timeoutInMillis;
+    }
+
+}
\ No newline at end of file
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestinationFactory.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestinationFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..9e51b57fd22d2647384d72521834cfc9a12ee297
--- /dev/null
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/ArchiveDestinationFactory.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import java.io.File;
+import java.io.Serializable;
+import java.util.Properties;
+
+import ch.systemsx.cisd.common.filesystem.FileOperations;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.filesystem.HostAwareFile;
+import ch.systemsx.cisd.common.filesystem.IPathCopier;
+import ch.systemsx.cisd.common.filesystem.highwatermark.HostAwareFileWithHighwaterMark;
+import ch.systemsx.cisd.common.filesystem.ssh.ISshCommandExecutor;
+import ch.systemsx.cisd.openbis.dss.generic.server.IDataSetFileOperationsExecutor;
+import ch.systemsx.cisd.openbis.dss.generic.server.LocalDataSetFileOperationsExcecutor;
+import ch.systemsx.cisd.openbis.dss.generic.server.RemoteDataSetFileOperationsExecutor;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.Copier;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.IPathCopierFactory;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.ISshCommandExecutorFactory;
+
+/**
+ * @author Jakub Straszewski
+ */
+
+public class ArchiveDestinationFactory implements Serializable
+{
+    private static final long serialVersionUID = 1L;
+
+    private final Properties properties;
+
+    private final IPathCopierFactory pathCopierFactory;
+
+    private final ISshCommandExecutorFactory sshCommandExecutorFactory;
+
+    private final String archiveDestinationHost;
+
+    private final long timeoutInMillis;
+
+    public ArchiveDestinationFactory(Properties properties, IPathCopierFactory pathCopierFactory,
+            ISshCommandExecutorFactory sshCommandExecutorFactory, String archiveDestinationHost, long timeoutInMillis)
+    {
+        this.properties = properties;
+        this.pathCopierFactory = pathCopierFactory;
+        this.sshCommandExecutorFactory = sshCommandExecutorFactory;
+        this.archiveDestinationHost = archiveDestinationHost;
+        this.timeoutInMillis = timeoutInMillis;
+    }
+
+    public ArchiveDestination createArchiveDestination()
+    {
+        HostAwareFile hostAwareFile = HostAwareFileWithHighwaterMark.create(archiveDestinationHost, -1);
+        boolean isHosted = hostAwareFile.tryGetHost() != null;
+        String destination = hostAwareFile.getPath();
+        IDataSetFileOperationsExecutor executor;
+
+        if (false == isHosted)
+        {
+            executor = createLocalDataSetFileOperationsExecutor(properties, pathCopierFactory, hostAwareFile, timeoutInMillis);
+        } else
+        {
+            executor =
+                    createRemoteDataSetFileOperationsExecutor(properties, pathCopierFactory, sshCommandExecutorFactory, hostAwareFile,
+                            timeoutInMillis);
+        }
+        return new ArchiveDestination(destination, executor, isHosted, timeoutInMillis);
+    }
+
+    protected static RemoteDataSetFileOperationsExecutor createRemoteDataSetFileOperationsExecutor(Properties properties,
+            IPathCopierFactory pathCopierFactory, ISshCommandExecutorFactory sshCommandExecutorFactory, HostAwareFile hostAwareFile,
+            long timeoutInMillis)
+    {
+        String hostOrNull = hostAwareFile.tryGetHost();
+        File sshExecutable = Copier.getExecutable(properties, AbstractDataSetFileOperationsManager.SSH_EXEC);
+        File rsyncExecutable = Copier.getExecutable(properties, AbstractDataSetFileOperationsManager.RSYNC_EXEC);
+        File gfindExecutable = Copier.getExecutable(properties, AbstractDataSetFileOperationsManager.GFIND_EXEC);
+
+        IPathCopier copier =
+                pathCopierFactory.create(rsyncExecutable, sshExecutable, timeoutInMillis);
+        copier.check();
+        String rsyncModule = hostAwareFile.tryGetRsyncModule();
+        String rsyncPasswordFile = properties.getProperty(AbstractDataSetFileOperationsManager.RSYNC_PASSWORD_FILE_KEY);
+        FileUtilities.checkPathCopier(copier, hostOrNull, null, rsyncModule, rsyncPasswordFile,
+                timeoutInMillis);
+        ISshCommandExecutor sshCommandExecutor = sshCommandExecutorFactory.create(sshExecutable, hostOrNull);
+        RemoteDataSetFileOperationsExecutor result = new RemoteDataSetFileOperationsExecutor(sshCommandExecutor, copier,
+                gfindExecutable, hostOrNull, rsyncModule, rsyncPasswordFile,
+                timeoutInMillis);
+        return result;
+    }
+
+    protected static LocalDataSetFileOperationsExcecutor createLocalDataSetFileOperationsExecutor(Properties properties,
+            IPathCopierFactory pathCopierFactory,
+            HostAwareFile hostAwareFile, long timeoutInMillis)
+    {
+        File sshExecutable = null; // don't use ssh locally
+        File rsyncExecutable = Copier.getExecutable(properties, AbstractDataSetFileOperationsManager.RSYNC_EXEC);
+        IPathCopier copier =
+                pathCopierFactory.create(rsyncExecutable, sshExecutable, timeoutInMillis);
+        copier.check();
+        String rsyncModule = hostAwareFile.tryGetRsyncModule();
+        String rsyncPasswordFile = properties.getProperty(AbstractDataSetFileOperationsManager.RSYNC_PASSWORD_FILE_KEY);
+        LocalDataSetFileOperationsExcecutor result = new LocalDataSetFileOperationsExcecutor(
+                FileOperations.getMonitoredInstanceForCurrentThread(), copier,
+                rsyncModule, rsyncPasswordFile);
+        return result;
+    }
+
+}
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetFileOperationsManager.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetFileOperationsManager.java
new file mode 100644
index 0000000000000000000000000000000000000000..7fad699868912ae2596ed208d2e128e236372000
--- /dev/null
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetFileOperationsManager.java
@@ -0,0 +1,295 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import java.io.File;
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.log4j.Logger;
+
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.common.exceptions.ExceptionWithStatus;
+import ch.systemsx.cisd.common.exceptions.Status;
+import ch.systemsx.cisd.common.filesystem.BooleanStatus;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.properties.PropertyUtils;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.FilteredHierarchicalContent;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.IHierarchicalContentNodeFilter;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContent;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContentNode;
+import ch.systemsx.cisd.openbis.dss.archiveverifier.batch.VerificationError;
+import ch.systemsx.cisd.openbis.dss.generic.server.AbstractDataSetPackager;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.IPackageManager;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.IPathCopierFactory;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.ISshCommandExecutorFactory;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TarPackageManager;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverContainerDTO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IShareIdManager;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+
+/**
+ * @author Jakub Straszewski
+ */
+public class MultiDataSetFileOperationsManager extends AbstractDataSetFileOperationsManager implements Serializable
+{
+    private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION, MultiDataSetFileOperationsManager.class);
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String STAGING_DESTINATION_KEY = "staging-destination";
+
+    private static final String FINAL_DESTINATION_KEY = "final-destination";
+
+    private transient ArchiveDestination stageArchive;
+
+    private transient ArchiveDestination finalArchive;
+
+    private final ArchiveDestinationFactory stageArchivefactory;
+
+    private final ArchiveDestinationFactory finalArchivefactory;
+
+    protected IPackageManager packageManager;
+
+    // TODO: some features existing in rsync archiver:
+    // - with sharding
+    // - ignore existing
+
+    public MultiDataSetFileOperationsManager(Properties properties, IPathCopierFactory pathCopierFactory,
+            ISshCommandExecutorFactory sshCommandExecutorFactory)
+    {
+        this.packageManager = new TarPackageManager(properties);
+        long timeoutInSeconds =
+                PropertyUtils.getLong(properties, TIMEOUT_KEY, DEFAULT_TIMEOUT_SECONDS);
+        long timeoutInMillis = timeoutInSeconds * DateUtils.MILLIS_PER_SECOND;
+
+        String stagingHostFile = PropertyUtils.getMandatoryProperty(properties, STAGING_DESTINATION_KEY);
+
+        String finalHostFile = PropertyUtils.getMandatoryProperty(properties, FINAL_DESTINATION_KEY);
+
+        if (false == new File(stagingHostFile).isDirectory())
+        {
+            throw new ConfigurationFailureException("Archiving stage area '" + stagingHostFile + "' is not an existing directory");
+        }
+
+        if (false == new File(finalHostFile).isDirectory())
+        {
+            throw new ConfigurationFailureException("Archiving final destination '" + finalHostFile + "' is not an existing directory");
+        }
+
+        this.stageArchivefactory =
+                new ArchiveDestinationFactory(properties, pathCopierFactory, sshCommandExecutorFactory, stagingHostFile, timeoutInMillis);
+        this.finalArchivefactory =
+                new ArchiveDestinationFactory(properties, pathCopierFactory, sshCommandExecutorFactory, finalHostFile, timeoutInMillis);
+
+    }
+
+    private ArchiveDestination getStageArchive()
+    {
+        if (stageArchive == null)
+        {
+            stageArchive = stageArchivefactory.createArchiveDestination();
+        }
+        return stageArchive;
+    }
+
+    private ArchiveDestination getFinalArchive()
+    {
+        if (finalArchive == null)
+        {
+            finalArchive = finalArchivefactory.createArchiveDestination();
+        }
+        return finalArchive;
+    }
+
+    public Status deleteContainerFromStage(String containerPath)
+    {
+        File stageArchiveContainerFile = new File(getStageArchive().getDestination(), containerPath);
+
+        if (false == stageArchiveContainerFile.isFile())
+        {
+            operationLog.warn("Archive container '" + containerPath + "' doesn't exist.");
+            return Status.OK;
+        }
+        boolean success = stageArchiveContainerFile.delete();
+        return success ? Status.OK : Status.createError("Couldn't delete archive container '" + containerPath);
+    }
+
+    public Status createContainerInStage(String containerPath, List<DatasetDescription> datasetDescriptions)
+    {
+        List<AbstractExternalData> dataSets = new LinkedList<AbstractExternalData>();
+
+        IShareIdManager shareIdManager = getDirectoryProvider().getShareIdManager();
+
+        for (DatasetDescription datasetDescription : datasetDescriptions)
+        {
+            AbstractExternalData dataSet = getDataSetWithAllMetaData(datasetDescription);
+            dataSets.add(dataSet);
+            shareIdManager.lock(dataSet.getCode());
+            operationLog.info("Archive dataset " + dataSet.getCode() + " in " + containerPath);
+        }
+
+        File stageArchiveContainerFile = new File(getStageArchive().getDestination(), containerPath);
+
+        boolean result = createFolderIfNotExists(getStageArchive(), stageArchiveContainerFile.getParentFile());
+
+        // createFolderIfNotExists returns true when the folder had to be created; warn on the opposite case
+        if (result == false)
+        {
+            operationLog.warn("Folder already exists in archive " + stageArchiveContainerFile.getParentFile());
+        }
+
+        Status status = Status.OK;
+        try
+        {
+            packageManager.create(stageArchiveContainerFile, dataSets); // packagemanager
+        } catch (Exception ex)
+        {
+            status = Status.createError(ex.toString());
+            operationLog.error("Couldn't create package file: " + containerPath, ex);
+        } finally
+        {
+            try
+            {
+                if (Status.OK.equals(status))
+                {
+                    Collection<VerificationError> errors = packageManager.verify(stageArchiveContainerFile);
+
+                    if (errors.size() > 0)
+                    {
+                        status = Status.createError(errors.toString());
+                        throw new RuntimeException(errors.toString());
+                    }
+                }
+
+                operationLog.info("Data sets " + datasetDescriptions + " archived: " + containerPath);
+            } catch (Exception ex)
+            {
+                operationLog.error("Couldn't create package file: " + containerPath, ex);
+            }
+            for (DatasetDescription datasetDescription : datasetDescriptions)
+            {
+                shareIdManager.releaseLock(datasetDescription.getDataSetCode());
+            }
+        }
+        return status;
+    }
+
+    /**
+     * Returns container path local to the archive root
+     */
+    public String getContainerPath(MultiDataSetArchiverContainerDTO container)
+    {
+        return container.getPath() + ".tar";
+    }
+
+    /**
+     * Copies specified dataset's data to destination specified in constructor. The path at the destination is defined by the original location of the
+     * data set.
+     */
+    public Status copyToFinalDestination(String containerLocalPath)
+    {
+        ArchiveDestination stageDestination = getStageArchive();
+        File containerFile = new File(stageDestination.getDestination(), containerLocalPath);
+
+        ArchiveDestination finalDestination = getFinalArchive();
+        try
+        {
+            File destinationFolder = new File(finalDestination.getDestination(), containerLocalPath);
+            if (createFolderIfNotExists(finalDestination, destinationFolder.getParentFile())
+                    || destinationExists(finalDestination, destinationFolder).isSuccess() == false)
+            {
+                operationLog.info("Copy archive container from '"
+                        + containerFile + "' to '" + destinationFolder.getParentFile());
+                finalDestination.getExecutor().copyDataSetToDestination(containerFile, destinationFolder.getParentFile());
+            } else
+            {
+                operationLog.info("Update archive container from '"
+                        + containerFile + "' to '" + destinationFolder.getParentFile());
+                finalDestination.getExecutor().syncDataSetWithDestination(containerFile, destinationFolder.getParentFile());
+            }
+
+            return Status.OK;
+        } catch (ExceptionWithStatus ex)
+        {
+            return ex.getStatus();
+        }
+    }
+
+    public Status deleteContainerFromFinalDestination(String containerLocalPath)
+    {
+        try
+        {
+            ArchiveDestination finalDestination = getFinalArchive();
+            File containerInFinalDestination = new File(finalDestination.getDestination(), containerLocalPath);
+            finalDestination.getExecutor().deleteFolder(containerInFinalDestination);
+            return Status.OK;
+        } catch (ExceptionWithStatus ex)
+        {
+            return ex.getStatus();
+        }
+    }
+
+    private static final IHierarchicalContentNodeFilter METADATA_IN_CONTAINER_FILTER = new IHierarchicalContentNodeFilter()
+        {
+            @Override
+            public boolean accept(IHierarchicalContentNode node)
+            {
+                return AbstractDataSetPackager.META_DATA_FILE_NAME.equals(node.getName()) == false;
+            }
+        };
+
+    public IHierarchicalContent getContainerAsHierarchicalContent(String containerPath)
+    {
+        ArchiveDestination archiveDestination = getFinalArchive();
+        String destinationRoot = archiveDestination.getDestination();
+        File containerInDestination = new File(destinationRoot, containerPath);
+
+        return new FilteredHierarchicalContent(packageManager.asHierarchialContent(containerInDestination), METADATA_IN_CONTAINER_FILTER);
+
+    }
+
+    private boolean createFolderIfNotExists(ArchiveDestination archiveDestination, File destinationFolder)
+    {
+        BooleanStatus destinationExists = destinationExists(archiveDestination, destinationFolder);
+        if (destinationExists.isSuccess() == false)
+        {
+            archiveDestination.getExecutor().createFolder(destinationFolder);
+            return true;
+        }
+        return false;
+    }
+
+    private BooleanStatus destinationExists(ArchiveDestination archiveDestination, File destinationFolder)
+    {
+        BooleanStatus destinationExists = archiveDestination.getExecutor().exists(destinationFolder);
+        if (destinationExists.isError())
+        {
+            operationLog.error("Could not check existence of '" + destinationFolder + "': "
+                    + destinationExists.tryGetMessage());
+            throw new ExceptionWithStatus(Status.createError(CHECK_EXISTENCE_FAILED));
+        }
+        return destinationExists;
+    }
+
+}
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java
new file mode 100644
index 0000000000000000000000000000000000000000..d564552ecd8757bac259bbac1d3e69d9b94069cb
--- /dev/null
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import java.io.File;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Properties;
+
+import ch.systemsx.cisd.common.exceptions.NotImplementedException;
+import ch.systemsx.cisd.common.exceptions.Status;
+import ch.systemsx.cisd.common.filesystem.BooleanStatus;
+import ch.systemsx.cisd.common.properties.PropertyUtils;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContent;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.api.IHierarchicalContentNode;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.AbstractArchiverProcessingPlugin;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiveCopierFactory;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.SshCommandExecutorFactory;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverContainerDTO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDatasetArchiverDBTransaction;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ArchiverTaskContext;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.IDatasetLocation;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+
+/**
+ * @author Jakub Straszewski
+ */
+public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
+{
+    private static final long serialVersionUID = 1L;
+
+    private transient MultiDataSetFileOperationsManager fileOperations;
+
+    private final FileOperationsManagerFactory fileOperationsFactory;
+
+    private static class FileOperationsManagerFactory implements Serializable
+    {
+        private static final long serialVersionUID = 1L;
+
+        private final Properties properties;
+
+        private FileOperationsManagerFactory(Properties properties)
+        {
+            this.properties = properties;
+        }
+
+        private MultiDataSetFileOperationsManager create()
+        {
+            return new MultiDataSetFileOperationsManager(properties, new RsyncArchiveCopierFactory(), new SshCommandExecutorFactory());
+        }
+    }
+
+    private final long minimumContainerSize;
+
+    private final long maximumContainerSize;
+
+    public static final String MINIMUM_CONTAINER_SIZE_IN_BYTES = "minimum-container-size-in-bytes";
+
+    public static final Long DEFAULT_MINIMUM_CONTAINER_SIZE_IN_BYTES = 10L * 1024 * 1024 * 1024;
+
+    public static final String MAXIMUM_CONTAINER_SIZE_IN_BYTES = "maximum-container-size-in-bytes";
+
+    public static final Long DEFAULT_MAXIMUM_CONTAINER_SIZE_IN_BYTES = 80L * 1024 * 1024 * 1024;
+
+    public MultiDatasetArchiver(Properties properties, File storeRoot)
+    {
+        super(properties, storeRoot, null, null);
+        this.minimumContainerSize = PropertyUtils.getLong(properties, MINIMUM_CONTAINER_SIZE_IN_BYTES, DEFAULT_MINIMUM_CONTAINER_SIZE_IN_BYTES);
+        this.maximumContainerSize = PropertyUtils.getLong(properties, MAXIMUM_CONTAINER_SIZE_IN_BYTES, DEFAULT_MAXIMUM_CONTAINER_SIZE_IN_BYTES);
+        this.fileOperationsFactory = new FileOperationsManagerFactory(properties);
+    }
+
+    @Override
+    protected DatasetProcessingStatuses doArchive(List<DatasetDescription> dataSets, ArchiverTaskContext context)
+    {
+        verifyDataSetsSize(dataSets);
+
+        DatasetProcessingStatuses result = new DatasetProcessingStatuses();
+
+        MultiDatasetArchiverDBTransaction transaction = new MultiDatasetArchiverDBTransaction();
+
+        try
+        {
+            result = doArchive(dataSets, transaction, context);
+
+            transaction.commit();
+            transaction.close();
+        } catch (Exception e)
+        {
+            operationLog.warn("Archiving of " + dataSets.size() + " data sets failed", e);
+            try
+            {
+                transaction.rollback();
+                transaction.close();
+            } catch (Exception ex)
+            {
+                operationLog.warn("Rollback of multi dataset db transaction failed", ex);
+            }
+            result.addResult(dataSets, Status.createError(e.getMessage()), Operation.ARCHIVE);
+        }
+        return result;
+    }
+
+    private void verifyDataSetsSize(List<DatasetDescription> dataSets)
+    {
+        long datasetSize = getDataSetsSize(dataSets);
+        if (dataSets.size() == 1)
+        {
+            if (datasetSize < minimumContainerSize)
+            {
+                throw new IllegalArgumentException("Dataset " + dataSets.get(0).getDataSetCode()
+                        + " is too small to be archived with multi dataset archiver.");
+            }
+            // if single dataset is bigger than specified maximum, we should still allow it being
+        }
+        else
+        {
+            if (datasetSize < minimumContainerSize)
+            {
+                throw new IllegalArgumentException("Datasets specified for archiving are too small to be archived with multi dataset archiver.");
+            }
+            else if (datasetSize > maximumContainerSize)
+            {
+                throw new IllegalArgumentException("Datasets specified for archiving are too big to be archived with multi dataset archiver.");
+            }
+        }
+    }
+
+    private DatasetProcessingStatuses doArchive(List<DatasetDescription> dataSets, MultiDatasetArchiverDBTransaction transaction,
+            ArchiverTaskContext context) throws Exception
+    {
+        DatasetProcessingStatuses statuses = new DatasetProcessingStatuses();
+
+        MultiDataSetArchiverContainerDTO container = transaction.createContainer();
+
+        for (DatasetDescription dataSet : dataSets)
+        {
+            transaction.insertDataset(dataSet, container);
+        }
+
+        String containerPath = getFileOperations().getContainerPath(container);
+        IHierarchicalContent archivedContent = null;
+
+        try
+        {
+
+            Status status = getFileOperations().createContainerInStage(containerPath, dataSets);
+            if (status.isError())
+            {
+                throw new Exception("Couldn't create package file in stage archive " + containerPath);
+            }
+
+            status = getFileOperations().copyToFinalDestination(containerPath);
+
+            if (status.isError())
+            {
+                throw new Exception("Couldn't copy container to final store");
+            }
+
+            archivedContent = getFileOperations().getContainerAsHierarchicalContent(containerPath);
+
+            for (DatasetDescription dataset : dataSets)
+            {
+                String dataSetCode = dataset.getDataSetCode();
+                IHierarchicalContent content = null;
+                try
+                {
+                    content = context.getHierarchicalContentProvider().asContentWithoutModifyingAccessTimestamp(dataSetCode);
+
+                    IHierarchicalContentNode root = content.getRootNode();
+                    IHierarchicalContentNode archiveDataSetRoot = archivedContent.getNode(dataset.getDataSetCode());
+
+                    status =
+                            RsyncArchiver.checkHierarchySizeAndChecksums(root, dataSetCode, archiveDataSetRoot,
+                                    RsyncArchiver.ChecksumVerificationCondition.IF_AVAILABLE);
+
+                    if (status.isError())
+                    {
+                        throw new Exception(status.tryGetErrorMessage());
+                    }
+                } finally
+                {
+                    if (content != null)
+                    {
+                        content.close();
+                    }
+                }
+                statuses.addResult(dataSetCode, status, Operation.ARCHIVE);
+            }
+        } catch (Exception ex)
+        {
+            getFileOperations().deleteContainerFromFinalDestination(containerPath);
+            // In case of error we actually should delete failed container here. If the transaction fail that the AbstractArchiver is unable to locate
+            // container file.
+            throw ex;
+        } finally
+        {
+            // always delete staging content
+            getFileOperations().deleteContainerFromStage(containerPath);
+
+            if (archivedContent != null)
+            {
+                archivedContent.close();
+            }
+        }
+        return statuses;
+
+    }
+
+    private long getDataSetsSize(List<DatasetDescription> ds)
+    {
+        long result = 0;
+        for (DatasetDescription dataset : ds)
+        {
+            result += dataset.getDataSetSize();
+        }
+        return result;
+    }
+
+    @Override
+    protected DatasetProcessingStatuses doUnarchive(List<DatasetDescription> datasets, ArchiverTaskContext context)
+    {
+        throw new NotImplementedException("Unarchiving is not yet implemented for multi dataset archiver");
+    }
+
+    @Override
+    protected DatasetProcessingStatuses doDeleteFromArchive(List<? extends IDatasetLocation> datasets)
+    {
+        throw new NotImplementedException("Deleting from archive is not yet implemented for multi dataset archiver");
+    }
+
+    @Override
+    protected BooleanStatus isDataSetSynchronizedWithArchive(DatasetDescription dataset, ArchiverTaskContext context)
+    {
+        return BooleanStatus.createFalse();
+    }
+
+    @Override
+    protected BooleanStatus isDataSetPresentInArchive(DatasetDescription dataset)
+    {
+        return BooleanStatus.createFalse();
+    }
+
+    public MultiDataSetFileOperationsManager getFileOperations()
+    {
+        if (fileOperations == null)
+        {
+            fileOperations = fileOperationsFactory.create();
+        }
+        return fileOperations;
+    }
+}
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiverTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiverTest.java
index e2daa080e225a7d957a38361e4ead8814c9bceb6..d26ff827a8d557b3f293182514f9e00113fdd31b 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiverTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/RsyncArchiverTest.java
@@ -301,7 +301,22 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
                         .getRootNode();
         assertEquals(
                 "OK",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
+                        RsyncArchiver.ChecksumVerificationCondition.YES).toString());
+    }
+
+    @Test
+    public void testCheckHierarchySizeAndChecksumsHappyCaseWithContainers()
+    {
+        IHierarchicalContentNode root1 =
+                new MockContent(":0:0", "f2.txt:15:13", "f1.txt:5:-3")
+                        .getRootNode();
+        IHierarchicalContentNode root2 =
+                new MockContent(":0:0", "a/:0:0", "a/f1.txt:5:-3", "a/f2.txt:15:13", "r.txt:7:17")
+                        .getRootNode().getChildNodes().get(0);
+        assertEquals(
+                "OK",
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "a", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -315,7 +330,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
         assertEquals(
                 "ERROR: \"Different paths: Path in the store is 'a/f1.txt' "
                         + "and in the archive 'a/f3.txt'.\"",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -328,7 +343,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
                 "ERROR: \"The path 'a' should be in store and archive either "
                         + "both directories or files but not mixed: In the store it is a directory "
                         + "but in the archive it is a file.\"",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -341,7 +356,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
                 new MockContent(":0:0", "a/:0:0", "a/f2.txt:15:13").getRootNode();
         assertEquals(
                 "ERROR: \"The directory 'a' has in the store 2 files but 1 in the archive.\"",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -352,7 +367,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
         IHierarchicalContentNode root2 = new MockContent(":0:0", "r.txt:9:17").getRootNode();
         assertEquals(
                 "ERROR: \"The file 'r.txt' has in the store 7 bytes but 9 in the archive.\"",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -364,7 +379,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
         assertEquals(
                 "ERROR: \"The file 'r.txt' has in the store the checksum 00000017 "
                         + "but 00000018 in the archive.\"",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.YES).toString());
     }
 
@@ -374,7 +389,7 @@ public class RsyncArchiverTest extends AbstractArchiverTestCase
         IHierarchicalContentNode root2 = new MockContent(":0:0", "r.txt:7:18").getRootNode();
         assertEquals(
                 "OK",
-                RsyncArchiver.checkHierarchySizeAndChecksums(root1, root2,
+                RsyncArchiver.checkHierarchySizeAndChecksums(root1, "", root2,
                         RsyncArchiver.ChecksumVerificationCondition.NO).toString());
     }
 }