diff --git a/datastore_server/source/java/ch/systemsx/cisd/etlserver/postregistration/TaskExecutor.java b/datastore_server/source/java/ch/systemsx/cisd/etlserver/postregistration/TaskExecutor.java index f109e2374aa74c53d3bcaa8c5686094bd528bfad..bd285e63308fce56e7bbe272f0e0b23adf9ec041 100644 --- a/datastore_server/source/java/ch/systemsx/cisd/etlserver/postregistration/TaskExecutor.java +++ b/datastore_server/source/java/ch/systemsx/cisd/etlserver/postregistration/TaskExecutor.java @@ -28,7 +28,6 @@ import org.apache.log4j.Logger; import ch.rinn.restrictions.Private; import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel; import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException; -import ch.systemsx.cisd.common.logging.ISimpleLogger; import ch.systemsx.cisd.common.logging.Log4jSimpleLogger; import ch.systemsx.cisd.common.logging.LogCategory; import ch.systemsx.cisd.common.logging.LogFactory; @@ -133,29 +132,34 @@ public class TaskExecutor } /** - * Performs all cleanup task in cleanup-tasks folder. + * Performs all cleanup tasks in cleanup-tasks folder. 
*/ public void cleanup() { Log4jSimpleLogger logger = new Log4jSimpleLogger(operationLog); File[] files = cleanupTasksFolder.listFiles(FILTER); - for (File file : files) + if (files != null) { - cleanupTask(file, logger); + operationLog.info("Perform " + files.length + " clean up task."); + for (File file : files) + { + try + { + ICleanupTask cleanupTask = deserializeFromFile(file); + cleanupTask.cleanup(logger); + } catch (Exception ex) + { + operationLog.error("Couldn't performed clean up task " + file, ex); + } + file.delete(); + } } } - private void cleanupTask(File file, ISimpleLogger logger) + private ICleanupTask deserializeFromFile(File file) throws IOException { - try - { - byte[] bytes = FileUtils.readFileToByteArray(file); - ((ICleanupTask) SerializationUtils.deserialize(bytes)).cleanup(logger); - } catch (Exception ex) - { - operationLog.error("Couldn't performed clean up task " + file, ex); - } - file.delete(); + byte[] bytes = FileUtils.readFileToByteArray(file); + return (ICleanupTask) SerializationUtils.deserialize(bytes); } } diff --git a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/utils/SegmentedStoreUtils.java b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/utils/SegmentedStoreUtils.java index e9f9c247d90b9b4d5ddd8169fba21448337ef4f5..599921c0d988539f61283f72bba68b595349c233 100644 --- a/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/utils/SegmentedStoreUtils.java +++ b/datastore_server/source/java/ch/systemsx/cisd/openbis/dss/generic/shared/utils/SegmentedStoreUtils.java @@ -247,8 +247,8 @@ public class SegmentedStoreUtils copyToShare(dataSetDirInStore, dataSetDirInNewShare); long size = assertEqualSizeAndChildren(dataSetDirInStore, dataSetDirInNewShare); String shareId = share.getName(); - shareIdManager.setShareId(dataSetCode, shareId); service.updateShareIdAndSize(dataSetCode, shareId, size); + shareIdManager.setShareId(dataSetCode, shareId); 
deleteDataSet(dataSetCode, dataSetDirInStore, shareIdManager, logger); } @@ -276,12 +276,24 @@ } } + /** + * Deletes the specified data set from the old share if it has already been moved to the new + * one, or from the new share if it is still in the old one. + * + * @param shareIdManager provides the current share. + */ public static void cleanUp(SimpleDataSetInformationDTO dataSet, File storeRoot, String newShareId, IShareIdManager shareIdManager, ISimpleLogger logger) { String dataSetCode = dataSet.getDataSetCode(); String shareId = shareIdManager.getShareId(dataSetCode); String oldShareId = dataSet.getDataSetShareId(); + if (newShareId.equals(oldShareId)) + { + logger.log(LogLevel.WARN, "No clean up will be performed because for data set " + + dataSetCode + " both shares are the same: " + oldShareId); + return; + } boolean currentIsOld = shareId.equals(oldShareId); boolean currentIsNew = shareId.equals(newShareId); if (currentIsOld == false && currentIsNew == false) { @@ -292,7 +304,8 @@ return; } File shareFolder = new File(storeRoot, currentIsOld ? newShareId : oldShareId); - deleteDataSet(dataSetCode, new File(shareFolder, dataSet.getDataSetLocation()), shareIdManager, logger); + String location = dataSet.getDataSetLocation(); + deleteDataSet(dataSetCode, new File(shareFolder, location), shareIdManager, logger); } private static void copyToShare(File file, File share)