From e4f63486fc56a1f27d65acd57c6f8dcfe242eef6 Mon Sep 17 00:00:00 2001 From: tpylak <tpylak> Date: Thu, 24 Jun 2010 14:43:33 +0000 Subject: [PATCH] LMS-1546 HCS: dataset uploader bugfix: handle the case when 2 threads upload data for the same plate SVN: 16722 --- .../openbis/dss/etl/HCSDatasetUploader.java | 139 +-------- .../openbis/dss/etl/HCSImageCheckList.java | 2 +- .../ScreeningContainerDatasetInfoHelper.java | 283 ++++++++++++++++-- .../featurevector/FeatureVectorUploader.java | 8 +- 4 files changed, 271 insertions(+), 161 deletions(-) diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java index 4becace9275..a94af70cbfe 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java @@ -25,11 +25,10 @@ import java.util.Set; import java.util.Map.Entry; import ch.systemsx.cisd.bds.hcs.Location; -import ch.systemsx.cisd.common.exceptions.UserFailureException; -import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel; +import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException; +import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfoHelper.ExperimentWithChannelsAndContainer; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgAcquiredImageDTO; -import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelStackDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgImageDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgSpotDTO; @@ -58,123 +57,17 @@ public class HCSDatasetUploader private void upload(ScreeningContainerDatasetInfo info, List<AcquiredPlateImage> images, Set<HCSImageFileExtractionResult.Channel> channels) { - long expId = getOrCreateExperiment(info); - long contId = getOrCreateContainer(expId, info); + ExperimentWithChannelsAndContainer basicStruct = + ScreeningContainerDatasetInfoHelper.getOrCreateExperimentWithChannelsAndContainer( + dao, info, channels); + long contId = basicStruct.getContainerId(); + Map<String, Long/* (tech id */> channelsMap = basicStruct.getChannelsMap(); Long[][] spotIds = getOrCreateSpots(contId, info, images); - Map<String, Long/* (tech id */> channelsMap = getOrCreateChannels(expId, channels); long datasetId = createDataset(contId, info); createImages(images, spotIds, channelsMap, datasetId); } - private Map<String, Long> getOrCreateChannels(long expId, - Set<HCSImageFileExtractionResult.Channel> channels) - { - List<ImgChannelDTO> allChannels = dao.getChannelsByExperimentId(expId); - if (allChannels.size() == 0) - { - return createChannels(expId, channels); - } else - { - return updateChannels(expId, channels, allChannels); - } - } - - private Map<String, Long> updateChannels(long expId, Set<Channel> channels, - List<ImgChannelDTO> allChannels) - { - Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels); - Map<String, Long> map = new HashMap<String, Long>(); - for (HCSImageFileExtractionResult.Channel channel : channels) - { - ImgChannelDTO channelDTO = updateChannel(channel, expId, existingChannels); - addChannel(map, channelDTO); - } - return map; - } - - private Map<String, Long> createChannels(long expId, Set<Channel> channels) - { - Map<String, Long> map = new HashMap<String, Long>(); - for 
(HCSImageFileExtractionResult.Channel channel : channels) - { - ImgChannelDTO channelDTO = createChannel(expId, channel); - addChannel(map, channelDTO); - } - return map; - } - - private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO) - { - map.put(channelDTO.getName(), channelDTO.getId()); - } - - private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels) - { - Map<String, ImgChannelDTO> nameMap = new HashMap<String, ImgChannelDTO>(); - for (ImgChannelDTO channel : channels) - { - nameMap.put(channel.getName().toUpperCase(), channel); - } - return nameMap; - } - - private ImgChannelDTO updateChannel(HCSImageFileExtractionResult.Channel channel, long expId, - Map<String, ImgChannelDTO> existingChannels) - { - ImgChannelDTO channelDTO = makeChannelDTO(channel, expId); - String channelName = channelDTO.getName(); - ImgChannelDTO existingChannel = existingChannels.get(channelName); - if (existingChannel == null) - { - throw createInvalidNewChannelException(expId, existingChannels, channelName); - } - // a channel with a specified name already exists for an experiment, its description - // will be updated. Wavelength will be updated only if it was null before. - if (channelDTO.getWavelength() == null) - { - channelDTO.setWavelength(existingChannel.getWavelength()); - } - if (existingChannel.getWavelength() != null - && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false) - { - throw UserFailureException.fromTemplate( - "There are already datasets registered for the experiment " - + "which use the same channel name, but with a different wavelength! " - + "Channel %s, old wavelength %d, new wavelength %d.", channelName, - existingChannel.getWavelength(), channelDTO.getWavelength()); - } - channelDTO.setId(existingChannel.getId()); - dao.updateChannel(channelDTO); - return channelDTO; - } - - private static UserFailureException createInvalidNewChannelException(long expId, - Map<String, ImgChannelDTO> existingChannels, String channelName) - { - return UserFailureException.fromTemplate( - "Experiment with id '%d' has already some channels registered " - + "and does not have a channel with a name '%s'. " - + "Register a new experiment to use new channels. " - + "Available channel names in this experiment: %s.", expId, channelName, - existingChannels.keySet()); - } - - private ImgChannelDTO createChannel(long expId, HCSImageFileExtractionResult.Channel channel) - { - ImgChannelDTO channelDTO = makeChannelDTO(channel, expId); - long channelId = dao.addChannel(channelDTO); - channelDTO.setId(channelId); - return channelDTO; - } - - private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel, - long expId) - { - return ImgChannelDTO.createExperimentChannel(channel.getName().toUpperCase(), channel - .tryGetDescription(), channel.tryGetWavelength(), expId); - } - private static class AcquiredImageInStack { private final String channelName; @@ -273,7 +166,13 @@ public class HCSDatasetUploader { for (AcquiredImageInStack image : images) { - long channelTechId = channelsMap.get(image.getChannelName()); + String channelName = image.getChannelName().toUpperCase(); + Long channelTechId = channelsMap.get(channelName); + if (channelTechId == null) + { + throw new EnvironmentFailureException("Invalid channel name " + channelName + + ". 
Available channels: " + channelsMap.keySet()); + } createImage(stackId, channelTechId, image); } } @@ -423,14 +322,4 @@ public class HCSDatasetUploader { return ScreeningContainerDatasetInfoHelper.createDataset(dao, info, contId); } - - private long getOrCreateContainer(long expId, ScreeningContainerDatasetInfo info) - { - return ScreeningContainerDatasetInfoHelper.getOrCreateContainer(dao, info, expId); - } - - private long getOrCreateExperiment(ScreeningContainerDatasetInfo info) - { - return ScreeningContainerDatasetInfoHelper.getOrCreateExperiment(dao, info); - } } diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java index 7155c4e8e2c..95827467dc2 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java @@ -143,7 +143,7 @@ public final class HCSImageCheckList this.wellCol = wellCol; this.tileRow = tileRow; this.tileCol = tileCol; - this.channelName = channelName; + this.channelName = channelName.toUpperCase(); } private final static String toString(final int row, final int col, final String type) diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java index 8ce0d018c68..36e34dd94d3 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java @@ -16,7 +16,15 @@ package ch.systemsx.cisd.openbis.dss.etl; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import ch.systemsx.cisd.common.exceptions.UserFailureException; +import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO; +import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgContainerDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO; @@ -28,76 +36,291 @@ import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO; */ public class ScreeningContainerDatasetInfoHelper { - private final ScreeningContainerDatasetInfo info; - private final IImagingUploadDAO dao; - public ScreeningContainerDatasetInfoHelper(IImagingUploadDAO dao, - ScreeningContainerDatasetInfo info) + public ScreeningContainerDatasetInfoHelper(IImagingUploadDAO dao) { this.dao = dao; - this.info = info; } - public long getOrCreateExperiment() - { - return getOrCreateExperiment(dao, info); - } - - public long getOrCreateContainer(long expId) + public ExperimentAndContainerIds getOrCreateExperimentAndContainer( + ScreeningContainerDatasetInfo info) { - return getOrCreateContainer(dao, info, expId); + return getOrCreateExperimentAndContainer(dao, info); } - public long getOrCreateDataset(long contId) + public long createDataset(long contId, ScreeningContainerDatasetInfo info) { - ImgDatasetDTO dataset = dao.tryGetDatasetByPermId(info.getDatasetPermId()); - if (null != dataset) - { - return dataset.getId(); - } else - { - return createDataset(dao, info, contId); - } - + return createDataset(dao, info, contId); } // Package-visible static methods - static long createDataset(IImagingUploadDAO dao, ScreeningContainerDatasetInfo 
info, long contId) + public static long createDataset(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info, + long contId) { + System.out.println("dataset perm id = " + info.getDatasetPermId()); ImgDatasetDTO dataset = new ImgDatasetDTO(info.getDatasetPermId(), info.getTileRows(), info .getTileColumns(), contId); return dao.addDataset(dataset); } - static long getOrCreateContainer(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info, - long expId) + /** + * NOTE: The code responsible for fetching the sample and experiment from the DB and creating + * them if they don't exist runs in a synchronized block and uses the currently opened + * transaction. That transaction is then closed and the data set is added to the DB in a second + * transaction. If the second transaction is rolled back, the sample and experiment created in + * the first transaction will stay in the DB. + */ + public static ExperimentAndContainerIds getOrCreateExperimentAndContainer( + IImagingUploadDAO dao, ScreeningContainerDatasetInfo info) + { + synchronized (IImagingUploadDAO.class) + { + CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info); + CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, exp.getId()); + if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false) + { + // without this commit other threads will not see the new experiment/sample when the + // synchronized block ends + dao.commit(); + } + return new ExperimentAndContainerIds(exp.getId(), cont.getId()); + } + } + + /** + * NOTE: The code responsible for fetching the sample and experiment from the DB and creating + * them if they don't exist runs in a synchronized block and uses the currently opened + * transaction. That transaction is then closed and the data set is added to the DB in a second + * transaction. If the second transaction is rolled back, the sample and experiment created in + * the first transaction will stay in the DB. 
+ */ + public static ExperimentWithChannelsAndContainer getOrCreateExperimentWithChannelsAndContainer( + IImagingUploadDAO dao, ScreeningContainerDatasetInfo info, + Set<HCSImageFileExtractionResult.Channel> channels) + { + ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao); + synchronized (IImagingUploadDAO.class) + { + CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info); + long expId = exp.getId(); + CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, expId); + Map<String, Long/* (tech id */> channelsMap = + helper.getOrCreateChannels(expId, channels); + if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false) + { + // without this commit other threads will not see the new experiment/sample when the + // synchronized block ends + dao.commit(); + } + return new ExperimentWithChannelsAndContainer(expId, cont.getId(), channelsMap); + } + } + + private static CreatedOrFetchedEntity getOrCreateContainer(IImagingUploadDAO dao, + ScreeningContainerDatasetInfo info, long expId) { String containerPermId = info.getContainerPermId(); Long containerId = dao.tryGetContainerIdPermId(containerPermId); if (containerId != null) { - return containerId; + return new CreatedOrFetchedEntity(true, containerId); } else { ImgContainerDTO container = new ImgContainerDTO(containerPermId, info.getContainerRows(), info .getContainerColumns(), expId); - return dao.addContainer(container); + containerId = dao.addContainer(container); + return new CreatedOrFetchedEntity(false, containerId); } } - static long getOrCreateExperiment(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info) + private static CreatedOrFetchedEntity getOrCreateExperiment(IImagingUploadDAO dao, + ScreeningContainerDatasetInfo info) { String experimentPermId = info.getExperimentPermId(); Long expId = dao.tryGetExperimentIdByPermId(experimentPermId); if (expId != null) { - return expId; + return new CreatedOrFetchedEntity(true, expId); + } else + { + expId = dao.addExperiment(experimentPermId); + return new CreatedOrFetchedEntity(false, expId); + } + } + + private static class CreatedOrFetchedEntity + { + private final boolean alreadyExisted; + + private final long id; + + public CreatedOrFetchedEntity(boolean alreadyExisted, long id) + { + this.alreadyExisted = alreadyExisted; + this.id = id; + } + + public boolean hasAlreadyExisted() + { + return alreadyExisted; + } + + public long getId() + { + return id; + } + } + + public static class ExperimentAndContainerIds + { + private final long experimentId; + + private final long containerId; + + public ExperimentAndContainerIds(long experimentId, long containerId) + { + this.experimentId = experimentId; + this.containerId = containerId; + } + + public long getExperimentId() + { + return experimentId; + } + + public long getContainerId() + { + return containerId; + } + } + + public static class ExperimentWithChannelsAndContainer extends ExperimentAndContainerIds + { + private final Map<String, Long/* (tech id */> channelsMap; + + public ExperimentWithChannelsAndContainer(long experimentId, long containerId, + Map<String, Long> channelsMap) + { + super(experimentId, containerId); + this.channelsMap = channelsMap; + } + + public Map<String, Long> getChannelsMap() + { + return channelsMap; + } + } + + // ------ channels creation ------------------------------ + + private Map<String, Long> getOrCreateChannels(long expId, + Set<HCSImageFileExtractionResult.Channel> channels) + { + List<ImgChannelDTO> allChannels = 
dao.getChannelsByExperimentId(expId); + if (allChannels.size() == 0) + { + return createChannels(expId, channels); } else { - return dao.addExperiment(experimentPermId); + return updateChannels(expId, channels, allChannels); + } + } + + private Map<String, Long> updateChannels(long expId, Set<Channel> channels, + List<ImgChannelDTO> allChannels) + { + Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels); + Map<String, Long> map = new HashMap<String, Long>(); + for (HCSImageFileExtractionResult.Channel channel : channels) + { + ImgChannelDTO channelDTO = updateChannel(channel, expId, existingChannels); + addChannel(map, channelDTO); } + return map; + } + + private Map<String, Long> createChannels(long expId, Set<Channel> channels) + { + Map<String, Long> map = new HashMap<String, Long>(); + for (HCSImageFileExtractionResult.Channel channel : channels) + { + ImgChannelDTO channelDTO = createChannel(expId, channel); + addChannel(map, channelDTO); + } + return map; + } + + private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO) + { + map.put(channelDTO.getName(), channelDTO.getId()); + } + + private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels) + { + Map<String, ImgChannelDTO> nameMap = new HashMap<String, ImgChannelDTO>(); + for (ImgChannelDTO channel : channels) + { + nameMap.put(channel.getName().toUpperCase(), channel); + } + return nameMap; + } + + private ImgChannelDTO updateChannel(HCSImageFileExtractionResult.Channel channel, long expId, + Map<String, ImgChannelDTO> existingChannels) + { + ImgChannelDTO channelDTO = makeChannelDTO(channel, expId); + String channelName = channelDTO.getName(); + ImgChannelDTO existingChannel = existingChannels.get(channelName); + if (existingChannel == null) + { + throw createInvalidNewChannelException(expId, existingChannels, channelName); + } + // a channel with a specified name already exists for an experiment, its description + // will be updated. Wavelength will be updated only if it was null before. + if (channelDTO.getWavelength() == null) + { + channelDTO.setWavelength(existingChannel.getWavelength()); + } + if (existingChannel.getWavelength() != null + && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false) + { + throw UserFailureException.fromTemplate( + "There are already datasets registered for the experiment " + + "which use the same channel name, but with a different wavelength! " + + "Channel %s, old wavelength %d, new wavelength %d.", channelName, + existingChannel.getWavelength(), channelDTO.getWavelength()); + } + channelDTO.setId(existingChannel.getId()); + dao.updateChannel(channelDTO); + return channelDTO; + } + + private static UserFailureException createInvalidNewChannelException(long expId, + Map<String, ImgChannelDTO> existingChannels, String channelName) + { + return UserFailureException.fromTemplate( + "Experiment with id '%d' has already some channels registered " + + "and does not have a channel with a name '%s'. " + + "Register a new experiment to use new channels. 
" + + "Available channel names in this experiment: %s.", expId, channelName, + existingChannels.keySet()); + } + + private ImgChannelDTO createChannel(long expId, HCSImageFileExtractionResult.Channel channel) + { + ImgChannelDTO channelDTO = makeChannelDTO(channel, expId); + long channelId = dao.addChannel(channelDTO); + channelDTO.setId(channelId); + return channelDTO; + } + + private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel, + long expId) + { + return ImgChannelDTO.createExperimentChannel(channel.getName().toUpperCase(), channel + .tryGetDescription(), channel.tryGetWavelength(), expId); } } diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java index 4b88dbe335d..9d2a93642e8 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java @@ -43,11 +43,9 @@ public class FeatureVectorUploader public void uploadFeatureVectors(List<CanonicalFeatureVector> fvecs) { - ScreeningContainerDatasetInfoHelper helper = - new ScreeningContainerDatasetInfoHelper(dao, info); - long expId = helper.getOrCreateExperiment(); - long contId = helper.getOrCreateContainer(expId); - long dataSetId = helper.getOrCreateDataset(contId); + ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao); + long contId = helper.getOrCreateExperimentAndContainer(info).getContainerId(); + long dataSetId = helper.createDataset(contId, info); for (CanonicalFeatureVector fvec : fvecs) { -- GitLab