Skip to content
Snippets Groups Projects
Commit f90799f5 authored by gakin's avatar gakin
Browse files

SSDM-2054 : Multi Data Set Archiving Finalizer improvements - Handle the case...

SSDM-2054 : Multi Data Set Archiving Finalizer improvements - Handle the case when the original archive file no longer exists.

SVN: 34340
parent 0811ab1a
No related branches found
No related tags found
No related merge requests found
......@@ -98,33 +98,34 @@ class MultiDataSetArchivingFinalizer implements IProcessingPluginTask
try
{
Parameters parameters = getParameters(context);
File originalFile = parameters.getOriginalFile();
operationLog.info("Waiting for replication of archive '" + originalFile
+ "' containing the following data sets: " + CollectionUtils.abbreviate(dataSetCodes, 20));
boolean noTimeout = waitUntilReplicated(parameters);
DataSetArchivingStatus archivingStatus = parameters.getStatus();
boolean removeFromDataStore = archivingStatus.isAvailable() == false;
if (noTimeout)
File originalFile = parameters.getOriginalFile();
if (originalFile.exists() == false)
{
DataSetCodesWithStatus codesWithStatus = new DataSetCodesWithStatus(dataSetCodes, archivingStatus, true);
IDataSetDeleter dataSetDeleter = ServiceProvider.getDataStoreService().getDataSetDeleter();
if (removeFromDataStore)
{
dataSetDeleter.scheduleDeletionOfDataSets(datasets,
TimingParameters.DEFAULT_MAXIMUM_RETRY_COUNT,
TimingParameters.DEFAULT_INTERVAL_TO_WAIT_AFTER_FAILURE_SECONDS);
}
updateStatus(codesWithStatus);
String message = "Replication of '" + originalFile + "' failed because the original file does not exist.";
status = createStatusAndRearchive(dataSetCodes, parameters, removeFromDataStore, originalFile, message);
} else
{
String message = "Replication of '" + originalFile + "' failed.";
operationLog.error(message);
status = Status.createError(message);
getCleaner().delete(originalFile);
getCleaner().delete(parameters.getReplicatedFile());
removeFromMapping(originalFile);
updateStatus(new DataSetCodesWithStatus(dataSetCodes, DataSetArchivingStatus.AVAILABLE, false));
ServiceProvider.getOpenBISService().archiveDataSets(dataSetCodes, removeFromDataStore);
operationLog.info("Waiting for replication of archive '" + originalFile
+ "' containing the following data sets: " + CollectionUtils.abbreviate(dataSetCodes, 20));
boolean noTimeout = waitUntilReplicated(parameters);
if (noTimeout)
{
DataSetCodesWithStatus codesWithStatus = new DataSetCodesWithStatus(dataSetCodes, archivingStatus, true);
IDataSetDeleter dataSetDeleter = ServiceProvider.getDataStoreService().getDataSetDeleter();
if (removeFromDataStore)
{
dataSetDeleter.scheduleDeletionOfDataSets(datasets,
TimingParameters.DEFAULT_MAXIMUM_RETRY_COUNT,
TimingParameters.DEFAULT_INTERVAL_TO_WAIT_AFTER_FAILURE_SECONDS);
}
updateStatus(codesWithStatus);
} else
{
String message = "Replication of '" + originalFile + "' failed.";
status = createStatusAndRearchive(dataSetCodes, parameters, removeFromDataStore, originalFile, message);
}
}
} catch (Exception ex)
{
......@@ -136,6 +137,18 @@ class MultiDataSetArchivingFinalizer implements IProcessingPluginTask
return processingStatus;
}
/**
 * Handles a failed replication by logging the error, cleaning up the archive artifacts and
 * scheduling the data sets for re-archiving.
 * <p>
 * Side effects, in order: logs {@code message} as an error, deletes the original and the
 * replicated archive files via the cleaner, removes the container entry for the original
 * file from the archiver mapping database, marks the data sets as AVAILABLE (and not
 * present in archive), and asks openBIS to archive them again.
 *
 * @param dataSetCodes codes of the data sets contained in the failed archive file.
 * @param parameters finalizer parameters; only the replicated-file path is read here.
 * @param removeFromDataStore whether the data store copies should be removed after the
 *            re-archiving triggered by this method succeeds.
 * @param originalFile the archive container file whose replication failed.
 * @param message error message to log and to wrap in the returned status.
 * @return an error {@link Status} carrying {@code message}.
 */
private Status createStatusAndRearchive(List<String> dataSetCodes, Parameters parameters, boolean removeFromDataStore, File originalFile, String message)
{
operationLog.error(message);
Status status = Status.createError(message);
// Remove both the (possibly missing) original archive and its replica.
getCleaner().delete(originalFile);
getCleaner().delete(parameters.getReplicatedFile());
// Drop the container entry from the multi-data-set archiver mapping database.
removeFromMapping(originalFile);
// Mark data sets as available again and not present in the archive before retrying.
updateStatus(new DataSetCodesWithStatus(dataSetCodes, DataSetArchivingStatus.AVAILABLE, false));
// Trigger re-archiving of the affected data sets.
ServiceProvider.getOpenBISService().archiveDataSets(dataSetCodes, removeFromDataStore);
return status;
}
private void removeFromMapping(File originalFile)
{
IMultiDataSetArchiverDBTransaction transaction = getTransaction();
......
......@@ -156,6 +156,41 @@ public class MultiDataSetArchivingFinalizerTest extends AbstractFileSystemTestCa
context.assertIsSatisfied();
}
@Test
public void testReplicationFailDueToMissingArchiveFile()
{
// Scenario: the original archive file was deleted before the finalizer ran, so
// replication must be reported as failed and the data sets re-archived.
final DatasetDescription ds1 = new DatasetDescriptionBuilder("ds1").getDatasetDescription();
// Simulate the missing original archive file.
dataFileInArchive.delete();
context.checking(new Expectations()
{
{
// Expected cleanup: container removed from the mapping DB and re-archiving triggered.
one(transaction).deleteContainer(dataFileInArchive.getName());
one(transaction).commit();
one(transaction).close();
one(openBISService).archiveDataSets(Arrays.asList(ds1.getDataSetCode()), true);
}
});
ProcessingStatus status = createFinalizer().process(Arrays.asList(ds1), processingContext);
// The log must show the parameters followed by the "original file does not exist" error.
assertEquals("INFO OPERATION.MultiDataSetArchivingFinalizer - "
+ "Parameters: {original-file-path=" + dataFileInArchive.getPath()
+ ", replicated-file-path=" + dataFileReplicated.getPath() + ", "
+ "finalizer-polling-time=20000, start-time=" + START_TIME_AS_STRING + ", "
+ "finalizer-max-waiting-time=300000, status=ARCHIVED}\n"
+ "ERROR OPERATION.MultiDataSetArchivingFinalizer - Replication of "
+ "'" + dataFileInArchive.getPath() + "' failed because the original file does not exist.",
logRecorder.getLogContent());
// The processing status for ds1 carries the same error message.
assertEquals("ERROR: \"Replication of '" + dataFileInArchive.getPath() + "' failed because the original file does not exist.\"",
status.tryGetStatusByDataset(ds1.getDataSetCode()).toString());
// Data set reverted to AVAILABLE and marked as no longer present in the archive.
assertEquals("[[ds1] - AVAILABLE]", updatedStatus.toString());
assertEquals(false, updatedStatus.get(0).isPresentInArchive());
// Both the original and the replicated archive files were handed to the cleaner.
assertEquals(Arrays.asList(dataFileInArchive, dataFileReplicated).toString(), cleaner.toString());
context.assertIsSatisfied();
}
@Test
public void testReplicationForArchiving()
{
......
0% Loading — or try again.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment