From 188a84c30a23ba921f2881bb506d10fe25186749 Mon Sep 17 00:00:00 2001
From: pkupczyk <pkupczyk>
Date: Tue, 1 Apr 2014 08:08:36 +0000
Subject: [PATCH] SSDM-11 - Switch the Strongbox Archiver to use tar - tests

SVN: 31253
---
 .../standard/AbstractPackageArchiverTest.java | 868 ++++++++++++++++++
 .../plugins/standard/TarArchiverTest.java     | 145 +++
 .../plugins/standard/ZipArchiverTest.java     | 842 +----------------
 3 files changed, 1032 insertions(+), 823 deletions(-)
 create mode 100644 datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractPackageArchiverTest.java
 create mode 100644 datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarArchiverTest.java

diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractPackageArchiverTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractPackageArchiverTest.java
new file mode 100644
index 00000000000..73cc13f433a
--- /dev/null
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractPackageArchiverTest.java
@@ -0,0 +1,868 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard;
+
+import static ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.AbstractArchiverProcessingPlugin.SHARE_FINDER_KEY;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.jmock.Expectations;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import ch.rinn.restrictions.Friend;
+import ch.systemsx.cisd.common.exceptions.Status;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.test.AssertionUtil;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.DefaultFileBasedHierarchicalContentFactory;
+import ch.systemsx.cisd.openbis.common.io.hierarchical_content.ZipBasedHierarchicalContentTest;
+import ch.systemsx.cisd.openbis.dss.generic.server.AbstractDataSetPackager;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IArchiverPlugin;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ProcessingStatus;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataSetArchivingStatus;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DatasetLocation;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Experiment;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.PhysicalDataSet;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataSetBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataStoreBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.ExperimentBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.PersonBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.SampleBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
+import ch.systemsx.cisd.openbis.generic.shared.translator.DataSetTranslator;
+
+/**
+ * @author Franz-Josef Elmer
+ */
+@Friend(toClasses = { AbstractArchiverProcessingPlugin.class, RsyncArchiver.class })
+public abstract class AbstractPackageArchiverTest extends AbstractArchiverTestCase
+{
+    protected static final String LOCATION = "a/b/c/ds1";
+
+    protected static final String SHARE_ID = "1";
+
+    protected static final String DATA_SET_CODE = "ds1";
+
+    protected static final File HDF5_ARCHIVE = new File(
+            "../openbis-common/resource/test-data/HDF5ContainerBasedHierarchicalContentNodeTest/thumbnails.h5");
+
+    protected File defaultArchive;
+
+    protected File ds1InStore;
+
+    protected DefaultFileBasedHierarchicalContentFactory contentFactory;
+
+    protected File archives;
+
+    protected File helloFile;
+
+    protected File readMeFile;
+
+    protected File emptyFolder;
+
+    protected void init()
+    {
+    }
+
+    protected abstract IArchiverPlugin createArchiver();
+
+    protected abstract String getPackageExtension();
+
+    protected abstract void assertPackageFileContent(File expectedContent, File file, String path, boolean compressed);
+
+    protected abstract void assertPackageFileContent(String expectedContent, File packageFile, String path, boolean compressed);
+
+    protected abstract void assertPackageDirectory(File file, String path);
+
+    @BeforeMethod
+    public void prepareTestData(Method method) throws IOException
+    {
+        wait(1); // Without waiting sometimes the meta data from a previous test is extracted
+        ds1InStore = new File(share1, LOCATION);
+        File subfolder = new File(ds1InStore, "original/my-data/subfolder");
+        subfolder.mkdirs();
+        FileUtils.copyFile(HDF5_ARCHIVE, new File(subfolder, "my-archive.h5"));
+        helloFile = new File(subfolder, "hello.txt");
+        FileUtilities.writeToFile(helloFile, "Hello world!");
+        readMeFile = new File(subfolder.getParentFile(), "read-me.txt");
+        emptyFolder = new File(subfolder.getParentFile(), "empty-folder");
+        emptyFolder.mkdirs();
+        FileUtilities.writeToFile(readMeFile, "Nothing to read!");
+        defaultArchive = new File(workingDirectory, "default-archive");
+        defaultArchive.mkdirs();
+        archives = new File(workingDirectory, "archives");
+        archives.mkdirs();
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.DEFAULT_DESTINATION_KEY, defaultArchive.getPath());
+        contentFactory = new DefaultFileBasedHierarchicalContentFactory();
+    }
+
+    @Test
+    public void testArchivingFlatToDefaultArchive()
+    {
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E")
+                .registrator(new PersonBuilder().name("Albert", "Einstein").getPerson())
+                .property("E-PROP", "42").getExperiment();
+        experiment.setRegistrationDate(new Date(98765));
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).fileFormat("ABC")
+                        .registrationDate(new Date(12345)).store(new DataStoreBuilder(DATA_STORE_CODE).getStore())
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(experiment);
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+
+        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+
+        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + getPackageExtension());
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
+                + "Data set 'ds1' archived: " + archivedDataSetFile, logRecorder.getLogContent());
+        List<Status> errorStatuses = processingStatus.getErrorStatuses();
+        assertEquals("[]", errorStatuses.toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tparent_codes\t\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE1\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t1970-01-01 01:01:38 +0100\n"
+                + "experiment\tregistrator\tAlbert Einstein\n"
+                + "experiment\tE-PROP\t42\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
+    }
+
+    @Test
+    public void testArchivingFlatToSpaceMappedArchive()
+    {
+        File mappingFile = new File(workingDirectory, "mapping.tsv");
+        File archive = new File(archives, "my-archive");
+        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
+        Sample sample = new SampleBuilder("/S/S1").type("MY-S").property("ANSWER", "42").getSample();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).sample(sample).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(experiment);
+        prepareTryGetSample(sample);
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+
+        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+
+        File archivedDataSetFile = new File(archive, ds1.getDataSetCode() + getPackageExtension());
+        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
+                + "' successfully loaded.\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        List<Status> errorStatuses = processingStatus.getErrorStatuses();
+        assertEquals("[]", errorStatuses.toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tparent_codes\t\n"
+                + "sample\ttype_code\tMY-S\n"
+                + "sample\tcode\tS1\n"
+                + "sample\tspace_code\tS\n"
+                + "sample\tregistration_timestamp\t\n"
+                + "sample\tregistrator\t\n"
+                + "sample\tANSWER\t42\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE1\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t\n"
+                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
+        File[] unzippedFiles = ZipBasedHierarchicalContentTest.getUnzippedFiles();
+        assertEquals("[]", Arrays.asList(unzippedFiles).toString());
+    }
+
+    @Test
+    public void testUnarchivingFromDefaultArchiveNoShardingWithCompression()
+    {
+        properties.setProperty(SHARE_FINDER_KEY + ".class", ShareFinder.class.getName());
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).size(28)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        final DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(experiment);
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.ARCHIVED, true);
+        context.checking(new Expectations()
+            {
+                {
+                    one(deleter).scheduleDeletionOfDataSets(Arrays.asList(dsd1), 11, 10);
+                }
+            });
+        // archive
+        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, true);
+        assertEquals("[]", processingStatus.getErrorStatuses().toString());
+        FileUtilities.deleteRecursively(ds1InStore); // delete in store
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        context.checking(new Expectations()
+            {
+                {
+                    one(configProvider).getDataStoreCode();
+                    will(returnValue(DATA_STORE_CODE));
+                }
+            });
+
+        processingStatus = archiver.unarchive(Arrays.asList(dsd1), archiverTaskContext);
+
+        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + getPackageExtension());
+        String logContent = logRecorder.getLogContent().replaceFirst("in all shares in .*s", "in all shares in ? s");
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile + "\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Obtained the list of all datasets in all shares in ? s.\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
+                + "Data set 'ds1' retrieved from archive '" + archivedDataSetFile + "' to '"
+                + ds1InStore + "'.", logContent);
+        assertEquals("[]", processingStatus.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tparent_codes\t\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE1\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t\n"
+                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
+        assertPackageDirectory(archivedDataSetFile, "original/my-data/empty-folder/");
+        assertEquals(true, ds1InStore.exists());
+        assertEquals("Hello world!", FileUtilities.loadToString(helloFile).trim());
+        assertEquals("Nothing to read!", FileUtilities.loadToString(readMeFile).trim());
+        assertEquals(true, emptyFolder.exists());
+        assertEquals(true, emptyFolder.isDirectory());
+    }
+
+
+
+    @Test
+    public void testArchivingTwiceWithIgnoreExistingSetToFalse()
+    {
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.IGNORE_EXISTING_KEY, "false");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        ds1.setExperiment(new ExperimentBuilder().identifier("/S/P/E2").type("MY-E").getExperiment());
+        dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+
+        ProcessingStatus processingStatus2 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+
+        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + getPackageExtension());
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile + "\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tparent_codes\t\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE2\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t\n"
+                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
+    }
+
+    @Test
+    public void testArchivingTwiceWithIgnoreExistingSetToTrue()
+    {
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.IGNORE_EXISTING_KEY, "true");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        ds1.setExperiment(new ExperimentBuilder().identifier("/S/P/E2").type("MY-E").getExperiment());
+        dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+
+        ProcessingStatus processingStatus2 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+
+        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + getPackageExtension());
+        AssertionUtil
+                .assertContainsLines(
+                        "INFO  OPERATION.AbstractDatastorePlugin - "
+                                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                                + archivedDataSetFile
+                                + "\n"
+                                + "INFO  OPERATION.AbstractDatastorePlugin - "
+                                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' will be ignored as it already exists in the archive."
+                        , logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tparent_codes\t\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE1\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t\n"
+                + "experiment\tregistrator\t\n"
+                + "experiment\tE-PROP\t42\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
+    }
+
+    @Test
+    public void testDeleteFromArchive()
+    {
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
+        properties.setProperty(RsyncArchiver.ONLY_MARK_AS_DELETED_KEY, "false");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + getPackageExtension());
+        assertEquals(true, archivedDataSetFile.exists());
+
+        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
+                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
+
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        assertEquals(false, archivedDataSetFile.exists());
+    }
+
+    @Test
+    public void testMarkAsDeletedFromArchive()
+    {
+        // Scenario: archive a data set into the flat default archive, then
+        // request deletion from the archive. With marker-file semantics the
+        // package file must survive; only a "deleted" marker is created.
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        // Record all mock interactions expected while archiving ds1.
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        // Archive without removing the data set from the store.
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + getPackageExtension());
+        assertEquals(true, archivedDataSetFile.exists());
+
+        // "Delete" from the archive; the package itself is expected to stay.
+        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
+                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
+
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        // Package untouched; deletion is recorded via a marker file only.
+        assertEquals(true, archivedDataSetFile.exists());
+        File markerFile = new File(defaultArchive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
+                + "/" + ds1.getDataSetCode());
+        assertEquals(true, markerFile.exists());
+    }
+
+    @Test
+    public void testMarkAsDeletedFromArchiveWithSharding()
+    {
+        // Same as testMarkAsDeletedFromArchive, but with sharding enabled the
+        // package file is stored under the data set's location path inside
+        // the default archive.
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        // Sharding: package path includes the store location of the data set.
+        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + getPackageExtension());
+        assertEquals(true, archivedDataSetFile.exists());
+
+        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
+                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
+
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.exists());
+        // The marker file itself is NOT sharded: it sits directly in the
+        // "marked as deleted" folder of the archive.
+        File markerFile = new File(defaultArchive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
+                + "/" + ds1.getDataSetCode());
+        assertEquals(true, markerFile.exists());
+    }
+
+    @Test
+    public void testMarkAsDeletedFromArchiveWithShardingAndMapping()
+    {
+        // A mapping file routes space /S to a dedicated archive folder
+        // ('my-archive'); CREATE_ARCHIVES lets the manager create it on
+        // demand. Deleting must create the marker inside that mapped archive.
+        File mappingFile = new File(workingDirectory, "mapping.tsv");
+        File archive = new File(archives, "my-archive");
+        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(ds1.getExperiment());
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
+        // Package ends up in the mapped archive, sharded by location.
+        File archivedDataSetFile = new File(archive, LOCATION + "/" + ds1.getDataSetCode() + getPackageExtension());
+        assertEquals(true, archivedDataSetFile.exists());
+
+        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
+                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
+
+        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
+                + "' successfully loaded.\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.exists());
+        // Marker is created in the mapped archive, not the default one.
+        File markerFile = new File(archive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
+                + "/" + ds1.getDataSetCode());
+        assertEquals(true, markerFile.exists());
+    }
+    
+    /**
+     * Archiving with sharding enabled and package compression switched off:
+     * the package must be created under the data set location inside the
+     * default archive and contain the meta data file plus the original files.
+     */
+    @Test
+    public void testArchivingWithShardingWithoutCompressingToDefaultArchive()
+    {
+        // FIX(review): this method was missing the @Test annotation, so
+        // TestNG silently skipped it (all sibling tests are method-annotated).
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
+        properties.setProperty(ZipPackageManager.COMPRESS_KEY, "false");
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC").property("ANSWER", "42")
+                        .experiment(experiment).getDataSet();
+        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareUpdateShareIdAndSize(537669);
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(experiment);
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+
+        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
+
+        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + getPackageExtension());
+        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile, logRecorder.getLogContent());
+        List<Status> errorStatuses = processingStatus.getErrorStatuses();
+        assertEquals("[]", errorStatuses.toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        // Meta data file plus the original data files must be in the package.
+        assertPackageFileContent("data_set\tcode\tds1\n"
+                + "data_set\tproduction_timestamp\t\n"
+                + "data_set\tproducer_code\t\n"
+                + "data_set\tdata_set_type\tMY-TYPE\n"
+                + "data_set\tis_measured\tTRUE\n"
+                + "data_set\tis_complete\tFALSE\n"
+                + "data_set\tANSWER\t42\n"
+                + "data_set\tparent_codes\t\n"
+                + "experiment\tspace_code\tS\n"
+                + "experiment\tproject_code\tP\n"
+                + "experiment\texperiment_code\tE1\n"
+                + "experiment\texperiment_type_code\tMY-E\n"
+                + "experiment\tregistration_timestamp\t\n"
+                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, false);
+        assertPackageFileContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", false);
+        assertPackageFileContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", false);
+        assertPackageFileContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", false);
+    }
+
+    @Test
+    public void testUnarchivingFromSpaceMappedArchiveWithShardingWithoutCompression()
+    {
+        // Round trip: archive (with removal from the store) into a
+        // space-mapped, sharded, uncompressed archive, delete the store copy,
+        // then unarchive and verify all files are restored.
+        File mappingFile = new File(workingDirectory, "mapping.tsv");
+        File archive = new File(archives, "my-archive");
+        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
+        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
+        properties.setProperty(ZipPackageManager.COMPRESS_KEY, "false");
+        properties.setProperty(SHARE_FINDER_KEY + ".class", ShareFinder.class.getName());
+        IArchiverPlugin archiver = createArchiver();
+        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
+        PhysicalDataSet ds1 =
+                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).size(28)
+                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
+                        .experiment(experiment).getDataSet();
+        final DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
+        prepareGetShareId();
+        prepareGetDataSetDirectory(dsd1);
+        prepareTryGetDataSet(ds1);
+        prepareTryGetExperiment(experiment);
+        prepareLockAndReleaseDataSet(ds1.getCode());
+        prepareGetDataSetDirectory("");
+        prepareGetDataSetDirectory(LOCATION);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareAsContent(ds1.getCode(), ds1InStore);
+        prepareUpdateStatus(DataSetArchivingStatus.ARCHIVED, true);
+        // Archiving with removeFromDataStore=true schedules a deletion.
+        // NOTE(review): the literal arguments 11 and 10 are not derivable from
+        // this file - confirm their meaning against the deleter interface.
+        context.checking(new Expectations()
+            {
+                {
+                    one(deleter).scheduleDeletionOfDataSets(Arrays.asList(dsd1), 11, 10);
+                }
+            });
+        // archive
+        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, true);
+        assertEquals("[]", processingStatus.getErrorStatuses().toString());
+        FileUtilities.deleteRecursively(ds1InStore); // delete in store
+        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
+        context.checking(new Expectations()
+            {
+                {
+                    one(configProvider).getDataStoreCode();
+                    will(returnValue(DATA_STORE_CODE));
+                }
+            });
+
+        processingStatus = archiver.unarchive(Arrays.asList(dsd1), archiverTaskContext);
+
+        File archivedDataSetFile = new File(archive, LOCATION + "/" + ds1.getDataSetCode() + getPackageExtension());
+        // Mask the variable share-scan duration so the log can be compared.
+        String logContent = logRecorder.getLogContent().replaceFirst("in all shares in .*s", "in all shares in ? s");
+        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
+                + "' successfully loaded.\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
+                + archivedDataSetFile + "\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Obtained the list of all datasets in all shares in ? s.\n"
+                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
+                + "Data set 'ds1' retrieved from archive '" + archivedDataSetFile + "' to '"
+                + ds1InStore + "'.", logContent);
+        assertEquals("[]", processingStatus.getErrorStatuses().toString());
+        assertEquals(true, archivedDataSetFile.isFile());
+        // All original files, including the empty folder, must be restored.
+        assertEquals(true, ds1InStore.exists());
+        assertEquals("Hello world!", FileUtilities.loadToString(helloFile).trim());
+        assertEquals("Nothing to read!", FileUtilities.loadToString(readMeFile).trim());
+        assertEquals(true, emptyFolder.exists());
+        assertEquals(true, emptyFolder.isDirectory());
+    }
+
+    /**
+     * Expects exactly one request for the hierarchical content of the given
+     * data set, served from the given file/folder in the store.
+     */
+    protected void prepareAsContent(final String dataSetCode, final File file)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(contentProvider).asContent(dataSetCode);
+                    will(returnValue(contentFactory.asHierarchicalContent(file, null)));
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one lookup of the given experiment (by its identifier)
+     * on the openBIS service mock.
+     */
+    protected void prepareTryGetExperiment(final Experiment experiment)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).tryGetExperiment(ExperimentIdentifierFactory.parse(experiment.getIdentifier()));
+                    will(returnValue(experiment));
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one lookup of the given sample (with its experiment,
+     * by identifier) on the openBIS service mock.
+     */
+    protected void prepareTryGetSample(final Sample sample)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).tryGetSampleWithExperiment(SampleIdentifierFactory.parse(sample.getIdentifier()));
+                    will(returnValue(sample));
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one lookup of the given data set (by code) on the
+     * openBIS service mock.
+     */
+    protected void prepareTryGetDataSet(final AbstractExternalData dataSet)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).tryGetDataSet(dataSet.getCode());
+                    will(returnValue(dataSet));
+                }
+            });
+    }
+
+    /**
+     * Expects at least one request for the given data set's store directory;
+     * always answers with {@code ds1InStore}.
+     */
+    protected void prepareGetDataSetDirectory(final DatasetDescription dataSet)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    atLeast(1).of(dataSetDirectoryProvider).getDataSetDirectory(dataSet);
+                    will(returnValue(ds1InStore));
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one directory lookup for the given location in share
+     * {@code SHARE_ID}; answers with {@code ds1InStore}.
+     */
+    protected void prepareGetDataSetDirectory(final String location)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(dataSetDirectoryProvider).getDataSetDirectory(SHARE_ID, location);
+                    will(returnValue(ds1InStore));
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one archiving-status update for {@code DATA_SET_CODE}
+     * with the given status and presence-in-archive flag.
+     */
+    protected void prepareUpdateStatus(final DataSetArchivingStatus status, final boolean presentInArchive)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(statusUpdater).update(Arrays.asList(DATA_SET_CODE), status, presentInArchive);
+                }
+            });
+    }
+
+    /**
+     * Expects exactly one registration of share id {@code SHARE_ID} and the
+     * given size for {@code DATA_SET_CODE}.
+     */
+    protected void prepareUpdateShareIdAndSize(final long size)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(service).updateShareIdAndSize(DATA_SET_CODE, SHARE_ID, size);
+                }
+            });
+    }
+
+    /**
+     * Expects the given data set to be locked and released exactly once each
+     * on the share-id manager.
+     */
+    protected void prepareLockAndReleaseDataSet(final String dataSetCode)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(shareIdManager).lock(dataSetCode);
+                    one(shareIdManager).releaseLock(dataSetCode);
+                }
+            });
+    }
+
+    /**
+     * Allows any number of share-id lookups for {@code DATA_SET_CODE};
+     * always answers {@code SHARE_ID}.
+     */
+    protected void prepareGetShareId()
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    allowing(shareIdManager).getShareId(DATA_SET_CODE);
+                    will(returnValue(SHARE_ID));
+                }
+            });
+    }
+
+    /**
+     * Sleeps for the given number of seconds.
+     */
+    protected void wait(int seconds)
+    {
+        try
+        {
+            Thread.sleep(seconds * 1000L);
+        } catch (InterruptedException ex)
+        {
+            // FIX(review): restore the interrupt flag instead of swallowing
+            // the exception, so callers can still observe the interruption.
+            Thread.currentThread().interrupt();
+        }
+    }
+
+}
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarArchiverTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarArchiverTest.java
new file mode 100644
index 00000000000..8837a2368bf
--- /dev/null
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/TarArchiverTest.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2014 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.UUID;
+
+import org.apache.commons.io.FileUtils;
+import org.testng.annotations.Test;
+
+import ch.rinn.restrictions.Friend;
+import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.filesystem.tar.Untar;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IArchiverPlugin;
+
+/**
+ * @author Franz-Josef Elmer
+ */
+@Friend(toClasses = { AbstractArchiverProcessingPlugin.class, RsyncArchiver.class })
+public class TarArchiverTest extends AbstractPackageArchiverTest
+{
+
+    @Override
+    protected IArchiverPlugin createArchiver()
+    {
+        TarArchiver archiver = new TarArchiver(properties, store);
+        archiver.statusUpdater = statusUpdater;
+        return archiver;
+    }
+
+    @Override
+    protected String getPackageExtension()
+    {
+        return ".tar";
+    }
+
+    @Override
+    protected void assertPackageFileContent(final File expectedContent, final File packageFile, String path, boolean compressed)
+    {
+        try
+        {
+            assertPackageFileContent(FileUtils.readFileToString(expectedContent), packageFile, path, compressed);
+        } catch (IOException e)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(e);
+        }
+    }
+
+    @Test
+    public void test()
+    {
+
+    }
+
+    @Override
+    protected void assertPackageFileContent(final String expectedContent, File packageFile, final String path, boolean compressed)
+    {
+        assertFileFromPackage(packageFile, path, new IAssertAction()
+            {
+
+                @Override
+                public void assertFileFromPackage(File fileFromPackage) throws Exception
+                {
+                    assertTrue(fileFromPackage.exists());
+                    if (fileFromPackage.isDirectory())
+                    {
+                        fail("Directory path: " + path);
+                    }
+                    assertEquals(expectedContent, FileUtils.readFileToString(fileFromPackage));
+                }
+            });
+    }
+
+    @Override
+    protected void assertPackageDirectory(File packageFile, final String path)
+    {
+        assertFileFromPackage(packageFile, path, new IAssertAction()
+            {
+                @Override
+                public void assertFileFromPackage(File fileFromPackage)
+                {
+                    assertTrue("Not a directory entry: " + path, fileFromPackage.isDirectory());
+                }
+            });
+    }
+
+    private void assertFileFromPackage(File packageFile, String path, IAssertAction assertAction)
+    {
+        Untar untar = null;
+        File extractTo = null;
+
+        try
+        {
+            File temp = new File(System.getProperty("java.io.tmpdir"));
+            extractTo = new File(temp, UUID.randomUUID().toString());
+
+            untar = new Untar(packageFile);
+            untar.extract(extractTo);
+
+            File fileFromPackage = new File(extractTo, path);
+
+            assertAction.assertFileFromPackage(fileFromPackage);
+        } catch (Exception ex)
+        {
+            throw CheckedExceptionTunnel.wrapIfNecessary(ex);
+        } finally
+        {
+            if (untar != null)
+            {
+                try
+                {
+                    untar.close();
+                } catch (IOException e)
+                {
+                    throw CheckedExceptionTunnel.wrapIfNecessary(e);
+                }
+            }
+            if (extractTo != null)
+            {
+                FileUtilities.deleteRecursively(extractTo);
+            }
+        }
+    }
+
+    private static interface IAssertAction
+    {
+        public void assertFileFromPackage(File fileFromPackage) throws Exception;
+    }
+}
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipArchiverTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipArchiverTest.java
index dfe2d970876..adfcd3f10c8 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipArchiverTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/ZipArchiverTest.java
@@ -16,727 +16,47 @@
 
 package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard;
 
-import static ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.AbstractArchiverProcessingPlugin.SHARE_FINDER_KEY;
-
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.jmock.Expectations;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
 
 import ch.rinn.restrictions.Friend;
 import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
-import ch.systemsx.cisd.common.exceptions.Status;
-import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.test.AssertionUtil;
-import ch.systemsx.cisd.openbis.common.io.hierarchical_content.DefaultFileBasedHierarchicalContentFactory;
 import ch.systemsx.cisd.openbis.common.io.hierarchical_content.ZipBasedHierarchicalContentTest;
-import ch.systemsx.cisd.openbis.dss.generic.server.AbstractDataSetPackager;
-import ch.systemsx.cisd.openbis.dss.generic.shared.ProcessingStatus;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataSetArchivingStatus;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DatasetLocation;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Experiment;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.PhysicalDataSet;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Sample;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataSetBuilder;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataStoreBuilder;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.ExperimentBuilder;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.PersonBuilder;
-import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.SampleBuilder;
-import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
-import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifierFactory;
-import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifierFactory;
-import ch.systemsx.cisd.openbis.generic.shared.translator.DataSetTranslator;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IArchiverPlugin;
 
 /**
  * @author Franz-Josef Elmer
  */
 @Friend(toClasses = { AbstractArchiverProcessingPlugin.class, RsyncArchiver.class })
-public class ZipArchiverTest extends AbstractArchiverTestCase
+public class ZipArchiverTest extends AbstractPackageArchiverTest
 {
-    private static final String LOCATION = "a/b/c/ds1";
-
-    private static final String SHARE_ID = "1";
-
-    private static final String DATA_SET_CODE = "ds1";
-
-    private static final File HDF5_ARCHIVE = new File(
-            "../openbis-common/resource/test-data/HDF5ContainerBasedHierarchicalContentNodeTest/thumbnails.h5");
-
-    private File defaultArchive;
-
-    private File ds1InStore;
-
-    private DefaultFileBasedHierarchicalContentFactory contentFactory;
-
-    private File archives;
 
-    private File helloFile;
-
-    private File readMeFile;
-
-    private File emptyFolder;
-
-    @BeforeMethod
-    public void prepareTestData(Method method) throws IOException
+    @Override
+    protected void init()
     {
         ZipBasedHierarchicalContentTest.removeUnzippedFiles();
-        wait(1); // Without waiting sometimes the meta data from a previous test is extracted from zip file.
-        ds1InStore = new File(share1, LOCATION);
-        File subfolder = new File(ds1InStore, "original/my-data/subfolder");
-        subfolder.mkdirs();
-        FileUtils.copyFile(HDF5_ARCHIVE, new File(subfolder, "my-archive.h5"));
-        helloFile = new File(subfolder, "hello.txt");
-        FileUtilities.writeToFile(helloFile, "Hello world!");
-        readMeFile = new File(subfolder.getParentFile(), "read-me.txt");
-        emptyFolder = new File(subfolder.getParentFile(), "empty-folder");
-        emptyFolder.mkdirs();
-        FileUtilities.writeToFile(readMeFile, "Nothing to read!");
-        defaultArchive = new File(workingDirectory, "default-archive");
-        defaultArchive.mkdirs();
-        archives = new File(workingDirectory, "archives");
-        archives.mkdirs();
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.DEFAULT_DESTINATION_KEY, defaultArchive.getPath());
-        contentFactory = new DefaultFileBasedHierarchicalContentFactory();
-    }
-
-    @Test
-    public void testArchivingFlatToDefaultArchive()
-    {
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E")
-                .registrator(new PersonBuilder().name("Albert", "Einstein").getPerson())
-                .property("E-PROP", "42").getExperiment();
-        experiment.setRegistrationDate(new Date(98765));
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).fileFormat("ABC")
-                        .registrationDate(new Date(12345)).store(new DataStoreBuilder(DATA_STORE_CODE).getStore())
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(experiment);
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-
-        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-
-        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + ".zip");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
-                + "Data set 'ds1' archived: " + archivedDataSetFile, logRecorder.getLogContent());
-        List<Status> errorStatuses = processingStatus.getErrorStatuses();
-        assertEquals("[]", errorStatuses.toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tparent_codes\t\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE1\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t1970-01-01 01:01:38 +0100\n"
-                + "experiment\tregistrator\tAlbert Einstein\n"
-                + "experiment\tE-PROP\t42\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
     }
 
-    @Test
-    public void testArchivingWithShardingWithoutCompressingToDefaultArchive()
+    @Override
+    protected IArchiverPlugin createArchiver()
     {
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
-        properties.setProperty(ZipPackageManager.COMPRESS_KEY, "false");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC").property("ANSWER", "42")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(experiment);
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-
-        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-
-        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + ".zip");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        List<Status> errorStatuses = processingStatus.getErrorStatuses();
-        assertEquals("[]", errorStatuses.toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tANSWER\t42\n"
-                + "data_set\tparent_codes\t\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE1\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t\n"
-                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, false);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", false);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", false);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", false);
-    }
-
-    @Test
-    public void testArchivingFlatToSpaceMappedArchive()
-    {
-        File mappingFile = new File(workingDirectory, "mapping.tsv");
-        File archive = new File(archives, "my-archive");
-        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
-        Sample sample = new SampleBuilder("/S/S1").type("MY-S").property("ANSWER", "42").getSample();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).sample(sample).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(experiment);
-        prepareTryGetSample(sample);
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-
-        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-
-        File archivedDataSetFile = new File(archive, ds1.getDataSetCode() + ".zip");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
-                + "' successfully loaded.\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        List<Status> errorStatuses = processingStatus.getErrorStatuses();
-        assertEquals("[]", errorStatuses.toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tparent_codes\t\n"
-                + "sample\ttype_code\tMY-S\n"
-                + "sample\tcode\tS1\n"
-                + "sample\tspace_code\tS\n"
-                + "sample\tregistration_timestamp\t\n"
-                + "sample\tregistrator\t\n"
-                + "sample\tANSWER\t42\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE1\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t\n"
-                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
-        File[] unzippedFiles = ZipBasedHierarchicalContentTest.getUnzippedFiles();
-        assertEquals("[]", Arrays.asList(unzippedFiles).toString());
-    }
-
-    @Test
-    public void testUnarchivingFromDefaultArchiveNoShardingWithCompression()
-    {
-        properties.setProperty(SHARE_FINDER_KEY + ".class", ShareFinder.class.getName());
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).size(28)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        final DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(experiment);
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.ARCHIVED, true);
-        context.checking(new Expectations()
-            {
-                {
-                    one(deleter).scheduleDeletionOfDataSets(Arrays.asList(dsd1), 11, 10);
-                }
-            });
-        // archive
-        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, true);
-        assertEquals("[]", processingStatus.getErrorStatuses().toString());
-        FileUtilities.deleteRecursively(ds1InStore); // delete in store
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        context.checking(new Expectations()
-            {
-                {
-                    one(configProvider).getDataStoreCode();
-                    will(returnValue(DATA_STORE_CODE));
-                }
-            });
-
-        processingStatus = archiver.unarchive(Arrays.asList(dsd1), archiverTaskContext);
-
-        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + ".zip");
-        String logContent = logRecorder.getLogContent().replaceFirst("in all shares in .*s", "in all shares in ? s");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile + "\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - Obtained the list of all datasets in all shares in ? s.\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
-                + "Data set 'ds1' retrieved from archive '" + archivedDataSetFile + "' to '"
-                + ds1InStore + "'.", logContent);
-        assertEquals("[]", processingStatus.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tparent_codes\t\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE1\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t\n"
-                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
-        assertZipDirectoryEntry(archivedDataSetFile, "original/my-data/empty-folder/");
-        assertEquals(true, ds1InStore.exists());
-        assertEquals("Hello world!", FileUtilities.loadToString(helloFile).trim());
-        assertEquals("Nothing to read!", FileUtilities.loadToString(readMeFile).trim());
-        assertEquals(true, emptyFolder.exists());
-        assertEquals(true, emptyFolder.isDirectory());
-    }
-
-    @Test
-    public void testUnarchivingFromSpaceMappedArchiveWithShardingWithoutCompression()
-    {
-        File mappingFile = new File(workingDirectory, "mapping.tsv");
-        File archive = new File(archives, "my-archive");
-        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
-        properties.setProperty(ZipPackageManager.COMPRESS_KEY, "false");
-        properties.setProperty(SHARE_FINDER_KEY + ".class", ShareFinder.class.getName());
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION).size(28)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        final DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(experiment);
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.ARCHIVED, true);
-        context.checking(new Expectations()
-            {
-                {
-                    one(deleter).scheduleDeletionOfDataSets(Arrays.asList(dsd1), 11, 10);
-                }
-            });
-        // archive
-        ProcessingStatus processingStatus = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, true);
-        assertEquals("[]", processingStatus.getErrorStatuses().toString());
-        FileUtilities.deleteRecursively(ds1InStore); // delete in store
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        context.checking(new Expectations()
-            {
-                {
-                    one(configProvider).getDataStoreCode();
-                    will(returnValue(DATA_STORE_CODE));
-                }
-            });
-
-        processingStatus = archiver.unarchive(Arrays.asList(dsd1), archiverTaskContext);
-
-        File archivedDataSetFile = new File(archive, LOCATION + "/" + ds1.getDataSetCode() + ".zip");
-        String logContent = logRecorder.getLogContent().replaceFirst("in all shares in .*s", "in all shares in ? s");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
-                + "' successfully loaded.\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile + "\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - Obtained the list of all datasets in all shares in ? s.\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - "
-                + "Data set 'ds1' retrieved from archive '" + archivedDataSetFile + "' to '"
-                + ds1InStore + "'.", logContent);
-        assertEquals("[]", processingStatus.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertEquals(true, ds1InStore.exists());
-        assertEquals("Hello world!", FileUtilities.loadToString(helloFile).trim());
-        assertEquals("Nothing to read!", FileUtilities.loadToString(readMeFile).trim());
-        assertEquals(true, emptyFolder.exists());
-        assertEquals(true, emptyFolder.isDirectory());
-    }
-
-    @Test
-    public void testArchivingTwiceWithIgnoreExistingSetToFalse()
-    {
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.IGNORE_EXISTING_KEY, "false");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        ds1.setExperiment(new ExperimentBuilder().identifier("/S/P/E2").type("MY-E").getExperiment());
-        dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-
-        ProcessingStatus processingStatus2 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-
-        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + ".zip");
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile + "\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tparent_codes\t\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE2\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t\n"
-                + "experiment\tregistrator\t\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
-    }
-
-    @Test
-    public void testArchivingTwiceWithIgnoreExistingSetToTrue()
-    {
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.IGNORE_EXISTING_KEY, "true");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        ds1.setExperiment(new ExperimentBuilder().identifier("/S/P/E2").type("MY-E").getExperiment());
-        dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-
-        ProcessingStatus processingStatus2 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-
-        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + ".zip");
-        AssertionUtil
-                .assertContainsLines(
-                        "INFO  OPERATION.AbstractDatastorePlugin - "
-                                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                                + archivedDataSetFile
-                                + "\n"
-                                + "INFO  OPERATION.AbstractDatastorePlugin - "
-                                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' will be ignored as it already exists in the archive."
-                        , logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.isFile());
-        assertZipContent("data_set\tcode\tds1\n"
-                + "data_set\tproduction_timestamp\t\n"
-                + "data_set\tproducer_code\t\n"
-                + "data_set\tdata_set_type\tMY-TYPE\n"
-                + "data_set\tis_measured\tTRUE\n"
-                + "data_set\tis_complete\tFALSE\n"
-                + "data_set\tparent_codes\t\n"
-                + "experiment\tspace_code\tS\n"
-                + "experiment\tproject_code\tP\n"
-                + "experiment\texperiment_code\tE1\n"
-                + "experiment\texperiment_type_code\tMY-E\n"
-                + "experiment\tregistration_timestamp\t\n"
-                + "experiment\tregistrator\t\n"
-                + "experiment\tE-PROP\t42\n", archivedDataSetFile, AbstractDataSetPackager.META_DATA_FILE_NAME, true);
-        assertZipContent("Hello world!", archivedDataSetFile, "original/my-data/subfolder/hello.txt", true);
-        assertZipContent("Nothing to read!", archivedDataSetFile, "original/my-data/read-me.txt", true);
-        assertZipContent(HDF5_ARCHIVE, archivedDataSetFile, "original/my-data/subfolder/my-archive.h5", true);
-    }
-
-    @Test
-    public void testDeleteFromArchive()
-    {
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
-        properties.setProperty(RsyncArchiver.ONLY_MARK_AS_DELETED_KEY, "false");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + ".zip");
-        assertEquals(true, archivedDataSetFile.exists());
-
-        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
-                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
-
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(false, archivedDataSetFile.exists());
-    }
-
-    @Test
-    public void testMarkAsDeletedFromArchive()
-    {
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        File archivedDataSetFile = new File(defaultArchive, ds1.getDataSetCode() + ".zip");
-        assertEquals(true, archivedDataSetFile.exists());
-
-        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
-                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
-
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.exists());
-        File markerFile = new File(defaultArchive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
-                + "/" + ds1.getDataSetCode());
-        assertEquals(true, markerFile.exists());
-    }
-
-    @Test
-    public void testMarkAsDeletedFromArchiveWithSharding()
-    {
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        File archivedDataSetFile = new File(defaultArchive, LOCATION + "/" + ds1.getDataSetCode() + ".zip");
-        assertEquals(true, archivedDataSetFile.exists());
-
-        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
-                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
-
-        AssertionUtil.assertContainsLines("INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.exists());
-        File markerFile = new File(defaultArchive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
-                + "/" + ds1.getDataSetCode());
-        assertEquals(true, markerFile.exists());
+        ZipArchiver archiver = new ZipArchiver(properties, store);
+        archiver.statusUpdater = statusUpdater;
+        return archiver;
     }
 
-    @Test
-    public void testMarkAsDeletedFromArchiveWithShardingAndMapping()
+    @Override
+    protected String getPackageExtension()
     {
-        File mappingFile = new File(workingDirectory, "mapping.tsv");
-        File archive = new File(archives, "my-archive");
-        FileUtilities.writeToFile(mappingFile, "Space\tLive Share\tArchive Folder\n/S\t1\t" + archive + "\n");
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.MAPPING_FILE_KEY, mappingFile.getPath());
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.CREATE_ARCHIVES_KEY, "true");
-        properties.setProperty(DistributedPackagingDataSetFileOperationsManager.WITH_SHARDING_KEY, "true");
-        ZipArchiver archiver = createArchiver();
-        Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").type("MY-E").property("E-PROP", "42").getExperiment();
-        PhysicalDataSet ds1 =
-                new DataSetBuilder().code(DATA_SET_CODE).type("MY-TYPE").location(LOCATION)
-                        .store(new DataStoreBuilder(DATA_STORE_CODE).getStore()).fileFormat("ABC")
-                        .experiment(experiment).getDataSet();
-        DatasetDescription dsd1 = DataSetTranslator.translateToDescription(ds1);
-        prepareGetShareId();
-        prepareUpdateShareIdAndSize(537669);
-        prepareGetDataSetDirectory(dsd1);
-        prepareTryGetDataSet(ds1);
-        prepareTryGetExperiment(ds1.getExperiment());
-        prepareLockAndReleaseDataSet(ds1.getCode());
-        prepareGetDataSetDirectory("");
-        prepareGetDataSetDirectory(LOCATION);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareAsContent(ds1.getCode(), ds1InStore);
-        prepareUpdateStatus(DataSetArchivingStatus.AVAILABLE, true);
-        ProcessingStatus processingStatus1 = archiver.archive(Arrays.asList(dsd1), archiverTaskContext, false);
-        assertEquals("[]", processingStatus1.getErrorStatuses().toString());
-        File archivedDataSetFile = new File(archive, LOCATION + "/" + ds1.getDataSetCode() + ".zip");
-        assertEquals(true, archivedDataSetFile.exists());
-
-        ProcessingStatus processingStatus2 = archiver.deleteFromArchive(Arrays.asList(
-                new DatasetLocation(ds1.getCode(), ds1.getLocation(), DATA_STORE_CODE, "")));
-
-        AssertionUtil.assertContainsLines("INFO  OPERATION.IdentifierAttributeMappingManager - Mapping file '" + mappingFile
-                + "' successfully loaded.\n"
-                + "INFO  OPERATION.AbstractDatastorePlugin - "
-                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
-                + "INFO  OPERATION.DistributedPackagingDataSetFileOperationsManager - Data set 'ds1' archived: "
-                + archivedDataSetFile, logRecorder.getLogContent());
-        assertEquals("[]", processingStatus2.getErrorStatuses().toString());
-        assertEquals(true, archivedDataSetFile.exists());
-        File markerFile = new File(archive, DataSetFileOperationsManager.FOLDER_OF_AS_DELETED_MARKED_DATA_SETS
-                + "/" + ds1.getDataSetCode());
-        assertEquals(true, markerFile.exists());
+        return ".zip";
     }
 
-    private void assertZipContent(File expectedContent, File file, String path, boolean compressed)
+    @Override
+    protected void assertPackageFileContent(File expectedContent, File file, String path, boolean compressed)
     {
         try
         {
@@ -756,11 +76,12 @@ public class ZipArchiverTest extends AbstractArchiverTestCase
         }
     }
 
-    private void assertZipContent(String expectedContent, File file, String path, boolean compressed)
+    @Override
+    protected void assertPackageFileContent(String expectedContent, File packageFile, String path, boolean compressed)
     {
         try
         {
-            ZipFile zipFile = new ZipFile(file);
+            ZipFile zipFile = new ZipFile(packageFile);
             ZipEntry entry = zipFile.getEntry(path);
             assertNotNull("No entry for " + path, entry);
             if (entry.isDirectory())
@@ -775,7 +96,8 @@ public class ZipArchiverTest extends AbstractArchiverTestCase
         }
     }
 
-    private void assertZipDirectoryEntry(File file, String path)
+    @Override
+    protected void assertPackageDirectory(File file, String path)
     {
         try
         {
@@ -789,130 +111,4 @@ public class ZipArchiverTest extends AbstractArchiverTestCase
         }
     }
 
-    private void prepareAsContent(final String dataSetCode, final File file)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(contentProvider).asContent(dataSetCode);
-                    will(returnValue(contentFactory.asHierarchicalContent(file, null)));
-                }
-            });
-    }
-
-    private void prepareTryGetExperiment(final Experiment experiment)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(service).tryGetExperiment(ExperimentIdentifierFactory.parse(experiment.getIdentifier()));
-                    will(returnValue(experiment));
-                }
-            });
-    }
-
-    private void prepareTryGetSample(final Sample sample)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(service).tryGetSampleWithExperiment(SampleIdentifierFactory.parse(sample.getIdentifier()));
-                    will(returnValue(sample));
-                }
-            });
-    }
-
-    private void prepareTryGetDataSet(final AbstractExternalData dataSet)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(service).tryGetDataSet(dataSet.getCode());
-                    will(returnValue(dataSet));
-                }
-            });
-    }
-
-    private void prepareGetDataSetDirectory(final DatasetDescription dataSet)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    atLeast(1).of(dataSetDirectoryProvider).getDataSetDirectory(dataSet);
-                    will(returnValue(ds1InStore));
-                }
-            });
-    }
-
-    private void prepareGetDataSetDirectory(final String location)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(dataSetDirectoryProvider).getDataSetDirectory(SHARE_ID, location);
-                    will(returnValue(ds1InStore));
-                }
-            });
-    }
-
-    private void prepareUpdateStatus(final DataSetArchivingStatus status, final boolean presentInArchive)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(statusUpdater).update(Arrays.asList(DATA_SET_CODE), status, presentInArchive);
-                }
-            });
-    }
-
-    private void prepareUpdateShareIdAndSize(final long size)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(service).updateShareIdAndSize(DATA_SET_CODE, SHARE_ID, size);
-                }
-            });
-    }
-
-    private void prepareLockAndReleaseDataSet(final String dataSetCode)
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    one(shareIdManager).lock(dataSetCode);
-                    one(shareIdManager).releaseLock(dataSetCode);
-                }
-            });
-    }
-
-    private void prepareGetShareId()
-    {
-        context.checking(new Expectations()
-            {
-                {
-                    allowing(shareIdManager).getShareId(DATA_SET_CODE);
-                    will(returnValue(SHARE_ID));
-                }
-            });
-    }
-
-    private ZipArchiver createArchiver()
-    {
-        ZipArchiver archiver = new ZipArchiver(properties, store);
-        archiver.statusUpdater = statusUpdater;
-        return archiver;
-    }
-
-    private void wait(int seconds)
-    {
-        try
-        {
-            Thread.sleep(seconds * 1000L);
-        } catch (InterruptedException ex)
-        {
-            // ignored
-        }
-    }
-
 }
-- 
GitLab