diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java
index 1080f331a5feec93affa7e593e663b7811f5a3d8..fcf6723937a836a0010f750ad69794db1660bc4a 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/MultiDatasetArchiver.java
@@ -18,6 +18,7 @@ package ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.archiver;
 
 import java.io.File;
 import java.io.Serializable;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
 
@@ -92,9 +93,10 @@ public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
         verifyDataSetsSize(dataSets);
 
         DatasetProcessingStatuses result = new DatasetProcessingStatuses();
-
         MultiDatasetArchiverDBTransaction transaction = new MultiDatasetArchiverDBTransaction();
 
+        filterAlreadyPresentInArchive(dataSets, result, transaction);
+
         try
         {
             result = doArchive(dataSets, transaction, context);
@@ -117,6 +119,21 @@ public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
         return result;
     }
 
+    private void filterAlreadyPresentInArchive(List<DatasetDescription> dataSets, DatasetProcessingStatuses result,
+            MultiDatasetArchiverDBTransaction transaction)
+    {
+        Iterator<DatasetDescription> it = dataSets.iterator();
+        while (it.hasNext())
+        {
+            DatasetDescription dataSet = it.next();
+            if (transaction.getDataSetByCode(dataSet.getDataSetCode()) != null)
+            {
+                result.addResult(dataSet.getDataSetCode(), Status.OK, Operation.ARCHIVE);
+                it.remove();
+            }
+        }
+    }
+
     private void verifyDataSetsSize(List<DatasetDescription> dataSets)
     {
         long datasetSize = getDataSetsSize(dataSets);
@@ -125,8 +142,8 @@ public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
             if (datasetSize < minimumContainerSize)
             {
                 throw new IllegalArgumentException("Dataset " + dataSets.get(0).getDataSetCode()
-                        + " is too small (" + FileUtilities.byteCountToDisplaySize(datasetSize) 
-                        + ") to be archived with multi dataset archiver because minimum size is " 
+                        + " is too small (" + FileUtilities.byteCountToDisplaySize(datasetSize)
+                        + ") to be archived with multi dataset archiver because minimum size is "
                         + FileUtilities.byteCountToDisplaySize(minimumContainerSize) + ".");
             }
             // if single dataset is bigger than specified maximum, we should still allow it being
@@ -136,15 +153,15 @@ public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
             if (datasetSize < minimumContainerSize)
             {
                 throw new IllegalArgumentException("Set of data sets specified for archiving is too small ("
-                        + FileUtilities.byteCountToDisplaySize(datasetSize) 
-                        + ") to be archived with multi dataset archiver because minimum size is " 
+                        + FileUtilities.byteCountToDisplaySize(datasetSize)
+                        + ") to be archived with multi dataset archiver because minimum size is "
                         + FileUtilities.byteCountToDisplaySize(minimumContainerSize) + ".");
             }
             else if (datasetSize > maximumContainerSize)
             {
                 throw new IllegalArgumentException("Set of data sets specified for archiving is too big ("
-                        + FileUtilities.byteCountToDisplaySize(datasetSize) 
-                        + ") to be archived with multi dataset archiver because maximum size is " 
+                        + FileUtilities.byteCountToDisplaySize(datasetSize)
+                        + ") to be archived with multi dataset archiver because maximum size is "
                         + FileUtilities.byteCountToDisplaySize(maximumContainerSize) + ".");
             }
         }
@@ -207,7 +224,7 @@ public class MultiDatasetArchiver extends AbstractArchiverProcessingPlugin
 
     }
 
-    private void checkArchivedDataSets(IHierarchicalContent archivedContent, List<DatasetDescription> dataSets, 
+    private void checkArchivedDataSets(IHierarchicalContent archivedContent, List<DatasetDescription> dataSets,
             ArchiverTaskContext context, DatasetProcessingStatuses statuses)
     {
         Status status;
diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDatasetArchiverDBTransaction.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDatasetArchiverDBTransaction.java
index 518fc29e215ec89c7d2c9ff3a45fea71e00dfaf5..9183ad560685b721086e88af48e9368f786ab050 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDatasetArchiverDBTransaction.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/standard/archiver/dataaccess/MultiDatasetArchiverDBTransaction.java
@@ -65,6 +65,14 @@ public class MultiDatasetArchiverDBTransaction
         return container;
     }
 
+    /**
+     * Returns the archived data set with the specified code, or <code>null</code> if no such data set is found.
+     */
+    public MultiDataSetArchiverDataSetDTO getDataSetByCode(String dataSetCode)
+    {
+        return transaction.getDataSetForCode(dataSetCode);
+    }
+
     public MultiDataSetArchiverDataSetDTO insertDataset(DatasetDescription dataSet,
             MultiDataSetArchiverContainerDTO container)
     {
@@ -74,7 +82,7 @@ public class MultiDatasetArchiverDBTransaction
 
         if (mads != null)
         {
-            throw new IllegalStateException("Trying to add dataset that has already been added.");
+            throw new IllegalStateException("Dataset " + dataSet.getDataSetCode() + " is already archived in another container.");
         }
 
         mads = new MultiDataSetArchiverDataSetDTO(0, code, container.getId(), dataSet.getDataSetSize());