From ed77983c454316f84fb129fa6a85304be3e1973e Mon Sep 17 00:00:00 2001
From: felmer <felmer>
Date: Wed, 21 Jan 2015 11:27:18 +0000
Subject: [PATCH] SSDM-1178: Introduce MultiDataSetUnarchivingMaintenanceTask
 and test. Add new column UNARCHIVING_REQUESTED to database table CONTAINERS.
 Modify MultiDataSetArchiver to request delayed unarchiving.

SVN: 33293
---
 .../AbstractArchiverProcessingPlugin.java     |  13 +-
 .../archiver/MultiDataSetArchiver.java        |  52 ++-
 ...ultiDataSetUnarchivingMaintenanceTask.java | 151 +++++++++
 .../IMultiDataSetArchiverDBTransaction.java   |   4 +
 .../IMultiDataSetArchiverQueryDAO.java        |  12 +
 ...IMultiDataSetArchiverReadonlyQueryDAO.java |   7 +-
 .../MultiDataSetArchiverContainerDTO.java     |  15 +-
 .../MultiDataSetArchiverDBTransaction.java    |  12 +
 .../MultiDataSetArchiverDBVersionHolder.java  |   2 +-
 .../generic/shared/ArchiverTaskContext.java   |  12 +
 .../postgresql/002/schema-002.sql             |  38 +++
 .../migration/migration-001-002.sql           |   3 +
 .../archiver/MultiDataSetArchiverTest.java    | 122 ++++++-
 ...DataSetUnarchivingMaintenanceTaskTest.java | 315 ++++++++++++++++++
 14 files changed, 735 insertions(+), 23 deletions(-)
 create mode 100644 datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTask.java
 create mode 100644 datastore_server/source/sql/multi-dataset-archive/postgresql/002/schema-002.sql
 create mode 100644 datastore_server/source/sql/multi-dataset-archive/postgresql/migration/migration-001-002.sql
 create mode 100644 datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTaskTest.java

diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
index bf15dba3a4a..c436bb5f6f4 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/AbstractArchiverProcessingPlugin.java
@@ -354,6 +354,11 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
     {
         operationLog.info("Unarchiving of the following datasets has been requested: "
                 + CollectionUtils.abbreviate(datasets, 10));
+        if (delayUnarchiving(datasets, context))
+        {
+            operationLog.info("Unarchiving delayed");
+            return createStatuses(Status.OK, datasets, Operation.UNARCHIVE).getProcessingStatus();
+        }
         DatasetProcessingStatuses statuses = safeUnarchive(datasets, context);
 
         asyncUpdateStatuses(statuses.getSuccessfulDatasetCodes(), AVAILABLE, true);
@@ -361,10 +366,10 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
 
         return statuses.getProcessingStatus();
     }
-
-    private void setUpUnarchivingPreparation(ArchiverTaskContext context)
+    
+    protected boolean delayUnarchiving(List<DatasetDescription> datasets, ArchiverTaskContext context)
     {
-        context.setUnarchivingPreparation(getUnarchivingPreparation());
+        return false;
     }
 
     protected IUnarchivingPreparation getUnarchivingPreparation()
@@ -398,7 +403,7 @@ public abstract class AbstractArchiverProcessingPlugin extends AbstractDatastore
         {
             try
             {
-                setUpUnarchivingPreparation(context);
+                context.setUnarchivingPreparation(getUnarchivingPreparation());
                 statuses = doUnarchive(datasets, context);
             } catch (Throwable t)
             {
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiver.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiver.java
index 32da11a2ef5..06bf82611d4 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiver.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiver.java
@@ -115,10 +115,6 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
         }
     }
 
-    private final long minimumContainerSize;
-
-    private final long maximumContainerSize;
-
     public static final String MINIMUM_CONTAINER_SIZE_IN_BYTES = "minimum-container-size-in-bytes";
 
     public static final Long DEFAULT_MINIMUM_CONTAINER_SIZE_IN_BYTES = 10 * FileUtils.ONE_GB;
@@ -131,6 +127,8 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
     
     public static final long DEFAULT_FINALIZER_MAX_WAITING_TIME = DateUtils.MILLIS_PER_DAY;
     
+    public static final String DELAY_UNARCHIVING = "delay-unarchiving";
+    
     public static final String CLEANER_PROPS = "cleaner";
 
     private transient IMultiDataSetArchiverReadonlyQueryDAO readonlyQuery;
@@ -138,7 +136,13 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
     private transient IDataStoreServiceInternal dataStoreService;
     
     private transient IMultiDataSetArchiveCleaner cleaner;
+    
+    private final long minimumContainerSize;
+    
+    private final long maximumContainerSize;
 
+    private final boolean delayUnarchiving;
+    
     private final long finalizerPollingTime;
 
     private final long finalizerMaxWaitingTime;
@@ -154,6 +158,7 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
             IFreeSpaceProvider freeSpaceProviderOrNull)
     {
         super(properties, storeRoot, null, null);
+        delayUnarchiving = PropertyUtils.getBoolean(properties, DELAY_UNARCHIVING, false);
         this.minimumContainerSize = PropertyUtils.getLong(properties, MINIMUM_CONTAINER_SIZE_IN_BYTES, DEFAULT_MINIMUM_CONTAINER_SIZE_IN_BYTES);
         this.maximumContainerSize = PropertyUtils.getLong(properties, MAXIMUM_CONTAINER_SIZE_IN_BYTES, DEFAULT_MAXIMUM_CONTAINER_SIZE_IN_BYTES);
         this.fileOperationsFactory = new FileOperationsManagerFactory(properties, timeProvider, freeSpaceProviderOrNull);
@@ -476,17 +481,46 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
     }
 
     @Override
-    protected DatasetProcessingStatuses doUnarchive(List<DatasetDescription> parameterDataSets, ArchiverTaskContext context)
+    protected boolean delayUnarchiving(List<DatasetDescription> datasets, ArchiverTaskContext context)
+    {
+        if (delayUnarchiving == false || context.isForceUnarchiving())
+        {
+            return false;
+        }
+        IMultiDataSetArchiverDBTransaction transaction = getTransaction();
+        try
+        {
+            List<String> dataSetCodes = translateToDataSetCodes(datasets);
+            transaction.requestUnarchiving(dataSetCodes);
+            transaction.commit();
+            transaction.close();
+        } catch (Exception e)
+        {
+            operationLog.warn("Requesting unarchiving of " + datasets.size() + " data sets failed", e);
+            try
+            {
+                transaction.rollback();
+                transaction.close();
+            } catch (Exception ex)
+            {
+                operationLog.warn("Rollback of multi dataset db transaction failed", ex);
+            }
+        }
+        return true;
+    }
+
+    @Override
+    protected DatasetProcessingStatuses doUnarchive(List<DatasetDescription> dataSets, ArchiverTaskContext context)
     {
-        List<String> dataSetCodes = translateToDataSetCodes(parameterDataSets);
+        List<String> dataSetCodes = translateToDataSetCodes(dataSets);
         long containerId = assertAllDataSetsInTheSameContainer(dataSetCodes);
         assertNoAvailableDatasets(dataSetCodes);
         
-        context.getUnarchivingPreparation().prepareForUnarchiving(parameterDataSets);
+        context.getUnarchivingPreparation().prepareForUnarchiving(dataSets);
 
         MultiDataSetArchiverContainerDTO container = getReadonlyQuery().getContainerForId(containerId);
 
-        getFileOperations().restoreDataSetsFromContainerInFinalDestination(container.getPath(), parameterDataSets);
+        getFileOperations().restoreDataSetsFromContainerInFinalDestination(container.getPath(), dataSets);
 
         for (String dataSetCode : dataSetCodes)
         {
@@ -494,7 +528,7 @@ public class MultiDataSetArchiver extends AbstractArchiverProcessingPlugin
         }
 
         DatasetProcessingStatuses result = new DatasetProcessingStatuses();
-        result.addResult(parameterDataSets, Status.OK, Operation.UNARCHIVE);
+        result.addResult(dataSets, Status.OK, Operation.UNARCHIVE);
         return result;
     }
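
For illustration (this snippet is not part of the patch): the delay is opt-in and is only taken when the archiver is configured with the new delay-unarchiving property and the caller has not forced unarchiving. A minimal Java sketch of enabling it, mirroring how the test further below sets the properties programmatically; in a real deployment the same keys would go into the archiver's plugin configuration, and createArchiver is assumed to be a helper of the test fixture, not of this class:

    // java.util.Properties; the keys are the constants defined above.
    Properties properties = new Properties();
    properties.setProperty(MultiDataSetArchiver.DELAY_UNARCHIVING, "true");
    properties.setProperty(MultiDataSetArchiver.MINIMUM_CONTAINER_SIZE_IN_BYTES, "15");
    MultiDataSetArchiver archiver = createArchiver(null);
    // unarchive() now only records the request in the CONTAINERS table and returns OK statuses;
    // the actual restore is performed later by MultiDataSetUnarchivingMaintenanceTask.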
 
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTask.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTask.java
new file mode 100644
index 00000000000..a3f7b9a3582
--- /dev/null
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTask.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2015 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.log4j.Logger;
+
+import ch.systemsx.cisd.common.collection.CollectionUtils;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.maintenance.IMaintenanceTask;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.IMultiDataSetArchiverDBTransaction;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.IMultiDataSetArchiverReadonlyQueryDAO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverContainerDTO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverDBTransaction;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverDataSetDTO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverDataSourceUtil;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ArchiverTaskContext;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IArchiverPlugin;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataSetDirectoryProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataStoreServiceInternal;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IHierarchicalContentProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+import ch.systemsx.cisd.openbis.generic.shared.translator.DataSetTranslator;
+
+/**
+ * Maintenance task which performs the delayed unarchiving of all multi data set archive containers for which unarchiving has been requested.
+ *
+ * @author Franz-Josef Elmer
+ */
+public class MultiDataSetUnarchivingMaintenanceTask implements IMaintenanceTask
+{
+    private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION,
+            MultiDataSetUnarchivingMaintenanceTask.class);
+
+    @Override
+    public void setUp(String pluginName, Properties properties)
+    {
+    }
+
+    @Override
+    public void execute()
+    {
+        IMultiDataSetArchiverReadonlyQueryDAO dao = getReadonlyQuery();
+        List<MultiDataSetArchiverContainerDTO> containersForUnarchiving = dao.listContainersForUnarchiving();
+        IDataStoreServiceInternal dataStoreService = getDataStoreService();
+        IArchiverPlugin archiverPlugin = dataStoreService.getArchiverPlugin();
+        IDataSetDirectoryProvider directoryProvider = dataStoreService.getDataSetDirectoryProvider();
+        IHierarchicalContentProvider hierarchicalContentProvider = getHierarchicalContentProvider();
+        ArchiverTaskContext context = new ArchiverTaskContext(directoryProvider, hierarchicalContentProvider);
+        context.setForceUnarchiving(true);
+        for (MultiDataSetArchiverContainerDTO container : containersForUnarchiving)
+        {
+            List<MultiDataSetArchiverDataSetDTO> dataSets = dao.listDataSetsForContainerId(container.getId());
+            List<String> dataSetCodes = extractCodes(dataSets);
+            operationLog.info("Start unarchiving " + CollectionUtils.abbreviate(dataSetCodes, 20));
+            List<DatasetDescription> loadedDataSets = loadDataSets(dataSetCodes);
+            archiverPlugin.unarchive(loadedDataSets, context);
+            resetRequestUnarchiving(container);
+            operationLog.info("Unarchiving finished for " + CollectionUtils.abbreviate(dataSetCodes, 20));
+        }
+    }
+    
+    private void resetRequestUnarchiving(MultiDataSetArchiverContainerDTO container)
+    {
+        IMultiDataSetArchiverDBTransaction transaction = getTransaction();
+        try
+        {
+            transaction.resetRequestUnarchiving(container.getId());
+            transaction.commit();
+            transaction.close();
+        } catch (Exception e)
+        {
+            operationLog.warn("Resetting the unarchiving request of container " + container + " failed", e);
+            try
+            {
+                transaction.rollback();
+                transaction.close();
+            } catch (Exception ex)
+            {
+                operationLog.warn("Rollback of multi dataset db transaction failed", ex);
+            }
+        }
+    }
+    
+    private List<DatasetDescription> loadDataSets(List<String> dataSetCodes)
+    {
+        IEncapsulatedOpenBISService service = getASService();
+        List<DatasetDescription> result = new ArrayList<DatasetDescription>();
+        for (AbstractExternalData dataSet : service.listDataSetsByCode(dataSetCodes))
+        {
+            result.add(DataSetTranslator.translateToDescription(dataSet));
+        }
+        return result;
+    }
+    
+    private List<String> extractCodes(List<MultiDataSetArchiverDataSetDTO> dataSets)
+    {
+        List<String> codes = new ArrayList<String>();
+        for (MultiDataSetArchiverDataSetDTO dataSet : dataSets)
+        {
+            codes.add(dataSet.getCode());
+        }
+        return codes;
+    }
+
+    IEncapsulatedOpenBISService getASService()
+    {
+        return ServiceProvider.getOpenBISService();
+    }
+
+    IHierarchicalContentProvider getHierarchicalContentProvider()
+    {
+        return ServiceProvider.getHierarchicalContentProvider();
+    }
+
+    IDataStoreServiceInternal getDataStoreService()
+    {
+        return ServiceProvider.getDataStoreService();
+    }
+    
+    IMultiDataSetArchiverReadonlyQueryDAO getReadonlyQuery()
+    {
+        return MultiDataSetArchiverDataSourceUtil.getReadonlyQueryDAO();
+    }
+    
+    IMultiDataSetArchiverDBTransaction getTransaction()
+    {
+        return new MultiDataSetArchiverDBTransaction();
+    }
+}
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverDBTransaction.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverDBTransaction.java
index a9f04d68873..16f6789f5ae 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverDBTransaction.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverDBTransaction.java
@@ -39,6 +39,10 @@ public interface IMultiDataSetArchiverDBTransaction
             MultiDataSetArchiverContainerDTO container);
 
     public MultiDataSetArchiverDataSetDTO getDataSetForCode(String code);
+    
+    public void requestUnarchiving(List<String> dataSetCodes);
+    
+    public void resetRequestUnarchiving(long containerId);
 
     public void commit();
 
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverQueryDAO.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverQueryDAO.java
index fd88f891720..f113f01460b 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverQueryDAO.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverQueryDAO.java
@@ -20,6 +20,8 @@ import net.lemnik.eodsql.Select;
 import net.lemnik.eodsql.TransactionQuery;
 import net.lemnik.eodsql.Update;
 
+import ch.systemsx.cisd.common.db.mapper.StringArrayMapper;
+
 /**
  * @author Jakub Straszewski
  */
@@ -61,4 +63,14 @@ public interface IMultiDataSetArchiverQueryDAO extends TransactionQuery, IMultiD
     
     @Update(sql = DELETE_CONTAINER)
     public void deleteContainer(String containerPath);
+    
+    final static String REQUEST_UNARCHIVING = "UPDATE containers SET unarchiving_requested = 't' "
+            + "WHERE id in (SELECT ctnr_id FROM data_sets WHERE code = any(?{1}))";
+    @Update(sql = REQUEST_UNARCHIVING, parameterBindings =
+        { StringArrayMapper.class })
+    public void requestUnarchiving(String[] dataSetCodes);
+    
+    final static String RESET_REQUEST_UNARCHIVING = "UPDATE containers SET unarchiving_requested = 'f' WHERE id = ?{1}";
+    @Update(sql = RESET_REQUEST_UNARCHIVING)
+    public void resetRequestUnarchiving(long containerId);
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverReadonlyQueryDAO.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverReadonlyQueryDAO.java
index 12eee012fb5..37cf2b77df1 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverReadonlyQueryDAO.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/IMultiDataSetArchiverReadonlyQueryDAO.java
@@ -31,12 +31,15 @@ public interface IMultiDataSetArchiverReadonlyQueryDAO extends BaseQuery
      * SELECT CONTAINER
      */
     final static String SELECT_CONTAINER =
-            " SELECT id, path "
+            " SELECT id, path, unarchiving_requested "
                     + "FROM containers ";
 
     @Select(sql = SELECT_CONTAINER + "WHERE id = ?{1}")
     public MultiDataSetArchiverContainerDTO getContainerForId(long containerId);
 
+    @Select(sql = SELECT_CONTAINER + "WHERE unarchiving_requested = 't'")
+    public List<MultiDataSetArchiverContainerDTO> listContainersForUnarchiving();
+    
     /*
      * SELECT DATA_SET
      */
@@ -53,5 +56,5 @@ public interface IMultiDataSetArchiverReadonlyQueryDAO extends BaseQuery
 
     @Select(sql = SELECT_DATA_SET + "WHERE ctnr_id = ?{1}")
     public List<MultiDataSetArchiverDataSetDTO> listDataSetsForContainerId(long containerId);
-
+    
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverContainerDTO.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverContainerDTO.java
index c69667450e0..bf121268a2f 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverContainerDTO.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverContainerDTO.java
@@ -25,6 +25,8 @@ public class MultiDataSetArchiverContainerDTO
     private long id;
 
     private String path;
+    
+    private boolean unarchivingRequested;
 
     public MultiDataSetArchiverContainerDTO()
     {
@@ -51,10 +53,21 @@ public class MultiDataSetArchiverContainerDTO
         return path;
     }
 
+    public boolean isUnarchivingRequested()
+    {
+        return unarchivingRequested;
+    }
+
+    public void setUnarchivingRequested(boolean unarchivingRequested)
+    {
+        this.unarchivingRequested = unarchivingRequested;
+    }
+
     @Override
     public String toString()
     {
-        return "MultiDataSetArchiverContainerDTO [id=" + id + ", path=" + path + "]";
+        return "MultiDataSetArchiverContainerDTO [id=" + id + ", path=" + path 
+                + ", unarchiving requested=" + unarchivingRequested + "]";
     }
 
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBTransaction.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBTransaction.java
index d2a2b16e5cb..4c4339ff799 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBTransaction.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBTransaction.java
@@ -88,6 +88,18 @@ public class MultiDataSetArchiverDBTransaction implements IMultiDataSetArchiverD
         return transaction.getDataSetForCode(code);
     }
 
+    @Override
+    public void requestUnarchiving(List<String> dataSetCodes)
+    {
+        transaction.requestUnarchiving(dataSetCodes.toArray(new String[dataSetCodes.size()]));
+    }
+
+    @Override
+    public void resetRequestUnarchiving(long containerId)
+    {
+        transaction.resetRequestUnarchiving(containerId);
+    }
+
     /**
      * @see net.lemnik.eodsql.TransactionQuery#commit()
      */
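
For illustration (this snippet is not part of the patch): the unarchiving request goes through the same transaction wrapper as the rest of the multi data set archive database access; the wrapper above converts the code list to a String[] so that StringArrayMapper can bind it to the any(?{1}) parameter of REQUEST_UNARCHIVING. A minimal sketch with hypothetical data set codes; error handling is elided here, the archiver itself rolls back and closes the transaction on failure as shown further above:

    // Hypothetical data set codes; requires java.util.Arrays.
    IMultiDataSetArchiverDBTransaction transaction = new MultiDataSetArchiverDBTransaction();
    transaction.requestUnarchiving(Arrays.asList("20150121120000000-1", "20150121120000000-2"));
    transaction.commit();
    transaction.close();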
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBVersionHolder.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBVersionHolder.java
index d7fd41d1945..828d17b71e7 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBVersionHolder.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDataSetArchiverDBVersionHolder.java
@@ -26,6 +26,6 @@ public class MultiDataSetArchiverDBVersionHolder implements IDatabaseVersionHold
     @Override
     public String getDatabaseVersion()
     {
-        return "001";
+        return "002";
     }
 }
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/ArchiverTaskContext.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/ArchiverTaskContext.java
index 869cc608060..a22147c7513 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/ArchiverTaskContext.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/ArchiverTaskContext.java
@@ -44,6 +44,8 @@ public class ArchiverTaskContext
     private String userEmail;
 
     private String userSessionToken;
+    
+    private boolean forceUnarchiving;
 
     public ArchiverTaskContext(IDataSetDirectoryProvider directoryProvider,
             IHierarchicalContentProvider hierarchicalContentProvider)
@@ -105,5 +107,15 @@ public class ArchiverTaskContext
     {
         return userSessionToken;
     }
+
+    public boolean isForceUnarchiving()
+    {
+        return forceUnarchiving;
+    }
+
+    public void setForceUnarchiving(boolean forceUnarchiving)
+    {
+        this.forceUnarchiving = forceUnarchiving;
+    }
     
 }
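
For illustration (this snippet is not part of the patch): the new flag is what lets MultiDataSetUnarchivingMaintenanceTask bypass the delayed-unarchiving gate, so that containers whose unarchiving was merely recorded are eventually restored. A condensed sketch of that call pattern, as used in the task above; directoryProvider, hierarchicalContentProvider, archiverPlugin and dataSets are assumed to be in scope:

    ArchiverTaskContext context = new ArchiverTaskContext(directoryProvider, hierarchicalContentProvider);
    // Forced contexts skip delayUnarchiving() in AbstractArchiverProcessingPlugin subclasses.
    context.setForceUnarchiving(true);
    archiverPlugin.unarchive(dataSets, context);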
diff --git a/datastore_server/source/sql/multi-dataset-archive/postgresql/002/schema-002.sql b/datastore_server/source/sql/multi-dataset-archive/postgresql/002/schema-002.sql
new file mode 100644
index 00000000000..129066e0db2
--- /dev/null
+++ b/datastore_server/source/sql/multi-dataset-archive/postgresql/002/schema-002.sql
@@ -0,0 +1,38 @@
+
+/* ---------------------------------------------------------------------- */
+/* Domains                                                                */
+/* ---------------------------------------------------------------------- */
+
+CREATE DOMAIN TECH_ID AS BIGINT;
+
+CREATE DOMAIN CODE AS VARCHAR(40);
+
+CREATE DOMAIN FILE_PATH AS VARCHAR(1000);
+
+CREATE DOMAIN BOOLEAN_CHAR AS boolean DEFAULT false;
+
+/* ---------------------------------------------------------------------- */
+/* Tables                                                                 */
+/* ---------------------------------------------------------------------- */
+
+CREATE TABLE CONTAINERS (
+  ID bigserial NOT NULL,
+  PATH FILE_PATH,
+  UNARCHIVING_REQUESTED BOOLEAN_CHAR DEFAULT false,
+
+  UNIQUE (PATH),
+  PRIMARY KEY (ID)
+);
+
+CREATE TABLE DATA_SETS (
+  ID bigserial NOT NULL,
+  CODE CODE NOT NULL,
+  CTNR_ID TECH_ID NOT NULL,
+  SIZE_IN_BYTES BIGINT NOT NULL, 
+
+  PRIMARY KEY (ID),
+  UNIQUE (CODE),
+  CONSTRAINT FK_DATA_SET_CONTAINERS FOREIGN KEY (CTNR_ID) REFERENCES CONTAINERS (ID) ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+CREATE INDEX DATA_SETS_CODE_IDX ON DATA_SETS (CODE);
diff --git a/datastore_server/source/sql/multi-dataset-archive/postgresql/migration/migration-001-002.sql b/datastore_server/source/sql/multi-dataset-archive/postgresql/migration/migration-001-002.sql
new file mode 100644
index 00000000000..44cd60dcc22
--- /dev/null
+++ b/datastore_server/source/sql/multi-dataset-archive/postgresql/migration/migration-001-002.sql
@@ -0,0 +1,3 @@
+CREATE DOMAIN BOOLEAN_CHAR AS boolean DEFAULT false;
+
+ALTER TABLE CONTAINERS ADD COLUMN UNARCHIVING_REQUESTED BOOLEAN_CHAR DEFAULT false;
\ No newline at end of file
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiverTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiverTest.java
index 7797f5c9d7f..a2425538bd5 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiverTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetArchiverTest.java
@@ -238,6 +238,43 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
             return result;
         }
 
+        @Override
+        public void requestUnarchiving(List<String> dataSetCodes)
+        {
+            for (String dataSetCode : dataSetCodes)
+            {
+                MultiDataSetArchiverDataSetDTO dataSet = getDataSetForCode(dataSetCode);
+                if (dataSet != null)
+                {
+                    MultiDataSetArchiverContainerDTO container = getContainerForId(dataSet.getContainerId());
+                    if (container != null)
+                    {
+                        container.setUnarchivingRequested(true);
+                    }
+                }
+            }
+        }
+
+        @Override
+        public List<MultiDataSetArchiverContainerDTO> listContainersForUnarchiving()
+        {
+            List<MultiDataSetArchiverContainerDTO> result = new ArrayList<MultiDataSetArchiverContainerDTO>();
+            for (MultiDataSetArchiverContainerDTO container : containers)
+            {
+                if (container.isUnarchivingRequested())
+                {
+                    result.add(container);
+                }
+            }
+            return result;
+        }
+
+        @Override
+        public void resetRequestUnarchiving(long containerId)
+        {
+            getContainerForId(containerId).setUnarchivingRequested(false);
+        }
+
         @Override
         public void commit()
         {
@@ -617,7 +654,7 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
                 + "      >experiment\tregistration_timestamp\t\n"
                 + "      >experiment\tregistrator\t\n", getArchiveFile(ds2));
         assertEquals("[ds2]: AVAILABLE true\n", statusUpdater.toString());
-        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds2-yyyyMMdd-HHmmss.tar]\n"
+        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds2-yyyyMMdd-HHmmss.tar, unarchiving requested=false]\n"
                 + "Data sets:\nMultiDataSetArchiverDataSetDTO [id=1, code=ds2, containerId=0, sizeInBytes=20]\n"
                 + "committed: true, rolledBack: false", removeTimeInformationFromContent(transaction.toString()));
         assertEquals(archive.getAbsolutePath(), freeSpaceRecorder.getRecordedObjects().get(0).getPath());
@@ -685,7 +722,7 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
                 + "      >experiment\tregistration_timestamp\t\n"
                 + "      >experiment\tregistrator\t\n", getArchiveFile(ds2));
         assertEquals("", statusUpdater.toString());
-        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds2-yyyyMMdd-HHmmss.tar]\n"
+        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds2-yyyyMMdd-HHmmss.tar, unarchiving requested=false]\n"
                 + "Data sets:\nMultiDataSetArchiverDataSetDTO [id=1, code=ds2, containerId=0, sizeInBytes=20]\n"
                 + "committed: true, rolledBack: false", removeTimeInformationFromContent(transaction.toString()));
         assertEquals(archive.getAbsolutePath(), freeSpaceRecorder.getRecordedObjects().get(0).getPath());
@@ -782,7 +819,7 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
                 + "      >experiment\tregistration_timestamp\t\n"
                 + "      >experiment\tregistrator\t\n", getArchiveFile(ds1));
         assertEquals("[ds1, ds2]: AVAILABLE true\n", statusUpdater.toString());
-        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds1-yyyyMMdd-HHmmss.tar]\n"
+        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=ds1-yyyyMMdd-HHmmss.tar, unarchiving requested=false]\n"
                 + "Data sets:\nMultiDataSetArchiverDataSetDTO [id=1, code=ds1, containerId=0, sizeInBytes=10]\n"
                 + "MultiDataSetArchiverDataSetDTO [id=2, code=ds2, containerId=0, sizeInBytes=20]\n"
                 + "committed: true, rolledBack: false", removeTimeInformationFromContent(transaction.toString()));
@@ -810,7 +847,7 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
         assertEquals("[]", status.getErrorStatuses().toString());
         assertEquals("[]", Arrays.asList(staging.list()).toString());
         assertEquals("[ds2]: AVAILABLE true\n", statusUpdater.toString());
-        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=path]\n"
+        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=path, unarchiving requested=false]\n"
                 + "Data sets:\nMultiDataSetArchiverDataSetDTO [id=1, code=ds2, containerId=0, sizeInBytes=20]\n"
                 + "committed: true, rolledBack: false", transaction.toString());
         context.assertIsSatisfied();
@@ -869,8 +906,8 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
                 + "      >experiment\tregistration_timestamp\t\n"
                 + "      >experiment\tregistrator\t\n", getArchiveFile(ds1));
         assertEquals("[ds1, ds2]: ARCHIVED true\n", statusUpdater.toString());
-        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=path]\n"
-                + "MultiDataSetArchiverContainerDTO [id=2, path=ds1-yyyyMMdd-HHmmss.tar]\n"
+        assertEquals("Containers:\nMultiDataSetArchiverContainerDTO [id=0, path=path, unarchiving requested=false]\n"
+                + "MultiDataSetArchiverContainerDTO [id=2, path=ds1-yyyyMMdd-HHmmss.tar, unarchiving requested=false]\n"
                 + "Data sets:\nMultiDataSetArchiverDataSetDTO [id=1, code=ds2, containerId=0, sizeInBytes=20]\n"
                 + "MultiDataSetArchiverDataSetDTO [id=3, code=ds1, containerId=2, sizeInBytes=10]\n"
                 + "committed: true, rolledBack: false", removeTimeInformationFromContent(transaction.toString()));
@@ -993,6 +1030,79 @@ public class MultiDataSetArchiverTest extends AbstractFileSystemTestCase
         context.assertIsSatisfied();
     }
 
+    @Test
+    public void testDelayUnarchiving()
+    {
+        MultiDataSetArchiverContainerDTO container = transaction.createContainer("c1");
+        ds1.setDataSetSize(10L);
+        ds2.setDataSetSize(20L);
+        transaction.insertDataset(ds1, container);
+        transaction.insertDataset(ds2, container);
+        transaction.commit();
+        properties.setProperty(MultiDataSetArchiver.DELAY_UNARCHIVING, "true");
+        properties.setProperty(MINIMUM_CONTAINER_SIZE_IN_BYTES, "15");
+        MultiDataSetArchiver archiver = createArchiver(null);
+        
+        ProcessingStatus status = archiver.unarchive(Arrays.asList(ds1, ds2), archiverContext);
+        
+        assertEquals("INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets "
+                + "has been requested: [Dataset 'ds1', Dataset 'ds2']\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Unarchiving delayed\n", getFilteredLogContent());
+        assertEquals("", statusUpdater.toString());
+        assertEquals("[]", status.getErrorStatuses().toString());
+        assertEquals("[MultiDataSetArchiverContainerDTO [id=0, path=c1, unarchiving requested=true]]", 
+                transaction.containers.toString());
+        context.assertIsSatisfied();
+    }
+    
+
+    @Test
+    public void testForceUnarchive()
+    {
+        prepareUpdateShareIdAndSize(ds1, 10);
+        prepareLockAndReleaseDataSet(ds1);
+        prepareFixedFreeSpace(35 * FileUtils.ONE_GB);
+        properties.setProperty(MultiDataSetArchiver.DELAY_UNARCHIVING, "true");
+        properties.setProperty(MINIMUM_CONTAINER_SIZE_IN_BYTES, "5");
+        MultiDataSetArchiver archiver = createArchiver(null);
+        ProcessingStatus status = archiver.archive(Arrays.asList(ds1), archiverContext, true);
+        assertEquals("INFO  OPERATION.AbstractDatastorePlugin - "
+                + "Archiving of the following datasets has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.MultiDataSetFileOperationsManager - Archive dataset ds1 in "
+                + staging.getAbsolutePath() + "/ds1-yyyyMMdd-HHmmss.tar\n"
+                + "INFO  OPERATION.MultiDataSetFileOperationsManager - Data sets archived: ds1-yyyyMMdd-HHmmss.tar\n"
+                + "INFO  OPERATION.MultiDataSetFileOperationsManager - Condition fulfilled after < 1sec, condition: "
+                + "Free space: 35.00 GB, needed space: 1.00 GB\n"
+                + "INFO  OPERATION.MultiDataSetFileOperationsManager - Copy archive container from '"
+                + staging.getAbsolutePath() + "/ds1-yyyyMMdd-HHmmss.tar' to '" + archive.getAbsolutePath() + "\n"
+                + "INFO  OPERATION.MultiDataSetFileOperationsManager - Copying archive container took 0:??:??.???\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Start sanity check on [Dataset 'ds1']\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Sanity check finished.",
+                getLogContent());
+        logRecorder.resetLogContent();
+        assertEquals("[]", status.getErrorStatuses().toString());
+        assertEquals(false, new File(share, ds1.getDataSetCode()).exists());
+        assertEquals("[ds1]: ARCHIVED true\n", statusUpdater.toString());
+        ds1.setDataSetSize(10 * FileUtils.ONE_GB);
+        prepareListDataSetsByCode(DataSetArchivingStatus.ARCHIVED, ds1);
+        prepareListPhysicalDataSets();
+        archiverContext.setForceUnarchiving(true);
+
+        prepareNotifyDataSetAccess(ds1.getDataSetCode());
+
+        status = archiver.unarchive(Arrays.asList(ds1), archiverContext);
+
+        assertEquals("INFO  OPERATION.AbstractDatastorePlugin - Unarchiving of the following datasets "
+                + "has been requested: [Dataset 'ds1']\n"
+                + "INFO  OPERATION.AbstractDatastorePlugin - Free space on unarchiving scratch share '1': "
+                + "34.00 GB, requested space for unarchiving 1 data sets: 10.00 GB\n", getFilteredLogContent());
+        assertEquals("[ds1]: ARCHIVED true\n[ds1]: AVAILABLE true\n", statusUpdater.toString());
+        assertContent("ds1:\n  data:\n    >0123456789\n", new File(share, ds1.getDataSetCode()));
+        assertEquals("[" + staging.getAbsolutePath() + "/ds1-yyyyMMdd-HHmmss.tar]", 
+                removeTimeInformationFromContent(cleaner.toString()));
+        context.assertIsSatisfied();
+    }
+
     @Test
     public void testUnarchiveWithDataSetsFromDifferentContainers()
     {
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTaskTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTaskTest.java
new file mode 100644
index 00000000000..242e94af3e3
--- /dev/null
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDataSetUnarchivingMaintenanceTaskTest.java
@@ -0,0 +1,315 @@
+/*
+ * Copyright 2015 ETH Zuerich, SIS
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.log4j.Level;
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.testng.AssertJUnit;
+import org.testng.ITestResult;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.common.logging.BufferedAppender;
+import ch.systemsx.cisd.common.test.RecordingMatcher;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.IMultiDataSetArchiverDBTransaction;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.IMultiDataSetArchiverReadonlyQueryDAO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverContainerDTO;
+import ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver.dataaccess.MultiDataSetArchiverDataSetDTO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ArchiverTaskContext;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IArchiverPlugin;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataSetDirectoryProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IDataStoreServiceInternal;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
+import ch.systemsx.cisd.openbis.dss.generic.shared.IHierarchicalContentProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProviderTestWrapper;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.AbstractExternalData;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataStore;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataSetBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.basic.dto.builders.DataStoreBuilder;
+import ch.systemsx.cisd.openbis.generic.shared.dto.DatasetDescription;
+import ch.systemsx.cisd.openbis.util.LogRecordingUtils;
+
+/**
+ * @author Franz-Josef Elmer
+ */
+public class MultiDataSetUnarchivingMaintenanceTaskTest extends AssertJUnit
+{
+    private static final String LOG_PREFIX = "INFO  OPERATION.MultiDataSetUnarchivingMaintenanceTask - ";
+
+    private static final class MockMultiDataSetUnarchivingMaintenanceTask extends MultiDataSetUnarchivingMaintenanceTask
+    {
+
+        private IEncapsulatedOpenBISService service;
+
+        private IHierarchicalContentProvider hierarchicalContentProvider;
+
+        private IDataStoreServiceInternal dataStoreService;
+
+        private IMultiDataSetArchiverReadonlyQueryDAO readonlyQuery;
+
+        private IMultiDataSetArchiverDBTransaction transaction;
+
+        public void setService(IEncapsulatedOpenBISService service)
+        {
+            this.service = service;
+        }
+
+        public void setHierarchicalContentProvider(IHierarchicalContentProvider hierarchicalContentProvider)
+        {
+            this.hierarchicalContentProvider = hierarchicalContentProvider;
+        }
+
+        public void setDataStoreService(IDataStoreServiceInternal dataStoreService)
+        {
+            this.dataStoreService = dataStoreService;
+        }
+
+        public void setReadonlyQuery(IMultiDataSetArchiverReadonlyQueryDAO readonlyQuery)
+        {
+            this.readonlyQuery = readonlyQuery;
+        }
+
+        public void setTransaction(IMultiDataSetArchiverDBTransaction transaction)
+        {
+            this.transaction = transaction;
+        }
+
+        @Override
+        IEncapsulatedOpenBISService getASService()
+        {
+            return service;
+        }
+
+        @Override
+        IHierarchicalContentProvider getHierarchicalContentProvider()
+        {
+            return hierarchicalContentProvider;
+        }
+
+        @Override
+        IDataStoreServiceInternal getDataStoreService()
+        {
+            return dataStoreService;
+        }
+
+        @Override
+        IMultiDataSetArchiverReadonlyQueryDAO getReadonlyQuery()
+        {
+            return readonlyQuery;
+        }
+
+        @Override
+        IMultiDataSetArchiverDBTransaction getTransaction()
+        {
+            return transaction;
+        }
+    }
+
+    private BufferedAppender logRecorder;
+
+    private Mockery context;
+
+    private IEncapsulatedOpenBISService service;
+
+    private IHierarchicalContentProvider hierarchicalContentProvider;
+
+    private IDataStoreServiceInternal dataStoreService;
+
+    private IMultiDataSetArchiverReadonlyQueryDAO readonlyQueryDAO;
+
+    private IMultiDataSetArchiverDBTransaction transaction;
+
+    private MockMultiDataSetUnarchivingMaintenanceTask maintenanceTask;
+
+    private IArchiverPlugin archiverPlugin;
+
+    private IDataSetDirectoryProvider directoryProvider;
+
+    @BeforeMethod
+    public void setUpTestEnvironment()
+    {
+        logRecorder = LogRecordingUtils.createRecorder("%-5p %c - %m%n", Level.INFO, "OPERATION.*");
+        context = new Mockery();
+        service = context.mock(IEncapsulatedOpenBISService.class);
+        hierarchicalContentProvider = context.mock(IHierarchicalContentProvider.class);
+        dataStoreService = context.mock(IDataStoreServiceInternal.class);
+        readonlyQueryDAO = context.mock(IMultiDataSetArchiverReadonlyQueryDAO.class);
+        transaction = context.mock(IMultiDataSetArchiverDBTransaction.class);
+        archiverPlugin = context.mock(IArchiverPlugin.class);
+        directoryProvider = context.mock(IDataSetDirectoryProvider.class);
+        maintenanceTask = new MockMultiDataSetUnarchivingMaintenanceTask();
+        maintenanceTask.setService(service);
+        maintenanceTask.setHierarchicalContentProvider(hierarchicalContentProvider);
+        maintenanceTask.setDataStoreService(dataStoreService);
+        maintenanceTask.setReadonlyQuery(readonlyQueryDAO);
+        maintenanceTask.setTransaction(transaction);
+        context.checking(new Expectations()
+            {
+                {
+                    allowing(dataStoreService).getArchiverPlugin();
+                    will(returnValue(archiverPlugin));
+                    allowing(dataStoreService).getDataSetDirectoryProvider();
+                    will(returnValue(directoryProvider));
+                }
+            });
+    }
+
+    @AfterMethod
+    public void checkMockExpectations(ITestResult result)
+    {
+        if (result.getStatus() == ITestResult.FAILURE)
+        {
+            String logContent = logRecorder.getLogContent();
+            fail(result.getName() + " failed. Log content:\n" + logContent);
+        }
+        logRecorder.reset();
+        // The following line of code should also be called at the end of each test method.
+        // Otherwise one does not know which test failed.
+        context.assertIsSatisfied();
+
+        ServiceProviderTestWrapper.restoreApplicationContext();
+    }
+
+    @Test
+    public void test()
+    {
+        MultiDataSetArchiverContainerDTO container1 = container(12);
+        MultiDataSetArchiverContainerDTO container2 = container(18);
+        prepareListContainersForUnarchiving(container1, container2);
+        MultiDataSetArchiverDataSetDTO dataSet1 = dataSet(42);
+        MultiDataSetArchiverDataSetDTO dataSet2 = dataSet(43);
+        MultiDataSetArchiverDataSetDTO dataSet3 = dataSet(44);
+        MultiDataSetArchiverDataSetDTO dataSet4 = dataSet(45);
+        RecordingMatcher<ArchiverTaskContext> recordingMatcher1 =
+                prepareForDataSetsOfContainer(container1.getId(), dataSet1, dataSet2, dataSet3);
+        RecordingMatcher<ArchiverTaskContext> recordingMatcher2 =
+                prepareForDataSetsOfContainer(container2.getId(), dataSet4);
+        prepareTransactionCommit(2);
+
+        maintenanceTask.execute();
+
+        assertEquals(LOG_PREFIX + "Start unarchiving [DS-42, DS-43, DS-44]\n" 
+                + LOG_PREFIX + "Unarchiving finished for [DS-42, DS-43, DS-44]\n" 
+                + LOG_PREFIX + "Start unarchiving [DS-45]\n"
+                + LOG_PREFIX + "Unarchiving finished for [DS-45]", logRecorder.getLogContent());
+        assertExpectedArchiverTaskContext(recordingMatcher1.recordedObject());
+        assertExpectedArchiverTaskContext(recordingMatcher2.recordedObject());
+        context.assertIsSatisfied();
+    }
+    
+    private void assertExpectedArchiverTaskContext(ArchiverTaskContext archiverTaskContext)
+    {
+        assertSame(directoryProvider, archiverTaskContext.getDirectoryProvider());
+        assertSame(hierarchicalContentProvider, archiverTaskContext.getHierarchicalContentProvider());
+        assertEquals(true, archiverTaskContext.isForceUnarchiving());
+        
+    }
+
+    private void prepareListContainersForUnarchiving(final MultiDataSetArchiverContainerDTO... containers)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    one(readonlyQueryDAO).listContainersForUnarchiving();
+                    will(returnValue(Arrays.asList(containers)));
+
+                    for (MultiDataSetArchiverContainerDTO container : containers)
+                    {
+                        one(transaction).resetRequestUnarchiving(container.getId());
+                    }
+                }
+            });
+    }
+
+    private RecordingMatcher<ArchiverTaskContext> prepareForDataSetsOfContainer(final long containerId,
+            final MultiDataSetArchiverDataSetDTO... dataSets)
+    {
+        final RecordingMatcher<ArchiverTaskContext> contextRecorder = new RecordingMatcher<ArchiverTaskContext>();
+        context.checking(new Expectations()
+            {
+                {
+                    one(readonlyQueryDAO).listDataSetsForContainerId(containerId);
+                    will(returnValue(Arrays.asList(dataSets)));
+
+                    final List<String> dataSetCodes = new ArrayList<String>();
+                    List<AbstractExternalData> dataSets2 = new ArrayList<AbstractExternalData>();
+                    DataStore dataStore = new DataStoreBuilder("DSS").getStore();
+                    for (MultiDataSetArchiverDataSetDTO dataSet : dataSets)
+                    {
+                        dataSetCodes.add(dataSet.getCode());
+                        DataSetBuilder builder = new DataSetBuilder(dataSet.getId()).code(dataSet.getCode())
+                                .store(dataStore).fileFormat("UNKNOWN");
+                        dataSets2.add(builder.getDataSet());
+                    }
+                    one(service).listDataSetsByCode(dataSetCodes);
+                    will(returnValue(dataSets2));
+
+                    one(archiverPlugin).unarchive(with(new BaseMatcher<List<DatasetDescription>>()
+                        {
+                            @SuppressWarnings("unchecked")
+                            @Override
+                            public boolean matches(Object obj)
+                            {
+                                assertEquals(dataSetCodes.toString(), DatasetDescription.extractCodes(
+                                        (List<DatasetDescription>) obj).toString());
+                                return true;
+                            }
+
+                            @Override
+                            public void describeTo(Description description)
+                            {
+                                description.appendText(dataSets.toString());
+                            }
+                        }), with(contextRecorder));
+                }
+            });
+        return contextRecorder;
+    }
+
+    private void prepareTransactionCommit(final int count)
+    {
+        context.checking(new Expectations()
+            {
+                {
+                    exactly(count).of(transaction).commit();
+                    exactly(count).of(transaction).close();
+                }
+            });
+    }
+
+    private MultiDataSetArchiverContainerDTO container(long id)
+    {
+        MultiDataSetArchiverContainerDTO container = new MultiDataSetArchiverContainerDTO();
+        container.setId(id);
+        container.setUnarchivingRequested(true);
+        return container;
+    }
+
+    private MultiDataSetArchiverDataSetDTO dataSet(long id)
+    {
+        return new MultiDataSetArchiverDataSetDTO(id, "DS-" + id, 1, id * 100);
+    }
+
+}
-- 
GitLab