Skip to content
Snippets Groups Projects
Commit e4f63486 authored by tpylak's avatar tpylak
Browse files

LMS-1546 HCS: dataset uploader bugfix: handle the case when 2 threads upload data for the same plate

SVN: 16722
parent 2552df19
No related branches found
No related tags found
No related merge requests found
...@@ -25,11 +25,10 @@ import java.util.Set; ...@@ -25,11 +25,10 @@ import java.util.Set;
import java.util.Map.Entry; import java.util.Map.Entry;
import ch.systemsx.cisd.bds.hcs.Location; import ch.systemsx.cisd.bds.hcs.Location;
import ch.systemsx.cisd.common.exceptions.UserFailureException; import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel; import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfoHelper.ExperimentWithChannelsAndContainer;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgAcquiredImageDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgAcquiredImageDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelStackDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelStackDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgImageDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgImageDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgSpotDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgSpotDTO;
...@@ -58,123 +57,17 @@ public class HCSDatasetUploader ...@@ -58,123 +57,17 @@ public class HCSDatasetUploader
private void upload(ScreeningContainerDatasetInfo info, List<AcquiredPlateImage> images, private void upload(ScreeningContainerDatasetInfo info, List<AcquiredPlateImage> images,
Set<HCSImageFileExtractionResult.Channel> channels) Set<HCSImageFileExtractionResult.Channel> channels)
{ {
long expId = getOrCreateExperiment(info); ExperimentWithChannelsAndContainer basicStruct =
long contId = getOrCreateContainer(expId, info); ScreeningContainerDatasetInfoHelper.getOrCreateExperimentWithChannelsAndContainer(
dao, info, channels);
long contId = basicStruct.getContainerId();
Map<String, Long/* (tech id */> channelsMap = basicStruct.getChannelsMap();
Long[][] spotIds = getOrCreateSpots(contId, info, images); Long[][] spotIds = getOrCreateSpots(contId, info, images);
Map<String, Long/* (tech id */> channelsMap = getOrCreateChannels(expId, channels);
long datasetId = createDataset(contId, info); long datasetId = createDataset(contId, info);
createImages(images, spotIds, channelsMap, datasetId); createImages(images, spotIds, channelsMap, datasetId);
} }
/**
 * Returns a map from (upper-cased) channel name to channel technical id for all given
 * <var>channels</var> of experiment <var>expId</var>. If the experiment has no channels
 * registered yet, all given channels are created; otherwise the existing channel rows are
 * updated to match the given channels.
 */
private Map<String, Long> getOrCreateChannels(long expId,
        Set<HCSImageFileExtractionResult.Channel> channels)
{
    List<ImgChannelDTO> allChannels = dao.getChannelsByExperimentId(expId);
    if (allChannels.size() == 0)
    {
        return createChannels(expId, channels);
    } else
    {
        return updateChannels(expId, channels, allChannels);
    }
}
/**
 * Updates the already-registered channels of experiment <var>expId</var> with the given
 * <var>channels</var>. Fails (in updateChannel) if a given channel name is not among
 * <var>allChannels</var> or if wavelengths conflict.
 *
 * @return map from channel name to channel technical id
 */
private Map<String, Long> updateChannels(long expId, Set<Channel> channels,
        List<ImgChannelDTO> allChannels)
{
    Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels);
    Map<String, Long> map = new HashMap<String, Long>();
    for (HCSImageFileExtractionResult.Channel channel : channels)
    {
        ImgChannelDTO channelDTO = updateChannel(channel, expId, existingChannels);
        addChannel(map, channelDTO);
    }
    return map;
}
/**
 * Creates database rows for all given channels of experiment <var>expId</var>.
 *
 * @return map from channel name to the technical id of the newly created channel
 */
private Map<String, Long> createChannels(long expId, Set<Channel> channels)
{
    final Map<String, Long> channelIdsByName = new HashMap<String, Long>();
    for (HCSImageFileExtractionResult.Channel channel : channels)
    {
        addChannel(channelIdsByName, createChannel(expId, channel));
    }
    return channelIdsByName;
}
/** Registers <var>channelDTO</var> in <var>map</var>, keyed by its channel name. */
private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO)
{
    map.put(channelDTO.getName(), channelDTO.getId());
}
/** Builds a lookup of the given channels keyed by upper-cased channel name. */
private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels)
{
    final Map<String, ImgChannelDTO> byUpperCaseName = new HashMap<String, ImgChannelDTO>();
    for (ImgChannelDTO channel : channels)
    {
        byUpperCaseName.put(channel.getName().toUpperCase(), channel);
    }
    return byUpperCaseName;
}
/**
 * Updates the database row of the experiment channel that matches <var>channel</var> by name.
 * The description is overwritten; the wavelength is taken over from the existing row when the
 * new one is unspecified, and a conflict of two specified, different wavelengths fails.
 *
 * @return the updated DTO carrying the technical id of the existing row
 * @throws UserFailureException if no channel with this name exists for the experiment, or if
 *             the wavelengths conflict
 */
private ImgChannelDTO updateChannel(HCSImageFileExtractionResult.Channel channel, long expId,
        Map<String, ImgChannelDTO> existingChannels)
{
    ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
    String channelName = channelDTO.getName();
    ImgChannelDTO existingChannel = existingChannels.get(channelName);
    if (existingChannel == null)
    {
        throw createInvalidNewChannelException(expId, existingChannels, channelName);
    }
    // a channel with a specified name already exists for an experiment, its description
    // will be updated. Wavelength will be updated only if it was null before.
    if (channelDTO.getWavelength() == null)
    {
        channelDTO.setWavelength(existingChannel.getWavelength());
    }
    // at this point a remaining mismatch means both wavelengths were set and differ
    if (existingChannel.getWavelength() != null
            && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false)
    {
        throw UserFailureException.fromTemplate(
                "There are already datasets registered for the experiment "
                        + "which use the same channel name, but with a different wavelength! "
                        + "Channel %s, old wavelength %d, new wavelength %d.", channelName,
                existingChannel.getWavelength(), channelDTO.getWavelength());
    }
    channelDTO.setId(existingChannel.getId());
    dao.updateChannel(channelDTO);
    return channelDTO;
}
/**
 * Builds the failure raised when a channel name is not among the channels already registered
 * for the experiment (new channels cannot be added to an existing experiment).
 */
private static UserFailureException createInvalidNewChannelException(long expId,
        Map<String, ImgChannelDTO> existingChannels, String channelName)
{
    return UserFailureException.fromTemplate(
            "Experiment with id '%d' has already some channels registered "
                    + "and does not have a channel with a name '%s'. "
                    + "Register a new experiment to use new channels. "
                    + "Available channel names in this experiment: %s.", expId, channelName,
            existingChannels.keySet());
}
/** Adds a new channel row for experiment <var>expId</var>; returns the DTO with its new id. */
private ImgChannelDTO createChannel(long expId, HCSImageFileExtractionResult.Channel channel)
{
    ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
    long channelId = dao.addChannel(channelDTO);
    channelDTO.setId(channelId);
    return channelDTO;
}
/** Converts <var>channel</var> into an experiment-scoped DTO; the name is upper-cased. */
private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel,
        long expId)
{
    return ImgChannelDTO.createExperimentChannel(channel.getName().toUpperCase(), channel
            .tryGetDescription(), channel.tryGetWavelength(), expId);
}
private static class AcquiredImageInStack private static class AcquiredImageInStack
{ {
private final String channelName; private final String channelName;
...@@ -273,7 +166,13 @@ public class HCSDatasetUploader ...@@ -273,7 +166,13 @@ public class HCSDatasetUploader
{ {
for (AcquiredImageInStack image : images) for (AcquiredImageInStack image : images)
{ {
long channelTechId = channelsMap.get(image.getChannelName()); String channelName = image.getChannelName().toUpperCase();
Long channelTechId = channelsMap.get(channelName);
if (channelTechId == null)
{
throw new EnvironmentFailureException("Invalid channel name " + channelName
+ ". Available channels: " + channelsMap.keySet());
}
createImage(stackId, channelTechId, image); createImage(stackId, channelTechId, image);
} }
} }
...@@ -423,14 +322,4 @@ public class HCSDatasetUploader ...@@ -423,14 +322,4 @@ public class HCSDatasetUploader
{ {
return ScreeningContainerDatasetInfoHelper.createDataset(dao, info, contId); return ScreeningContainerDatasetInfoHelper.createDataset(dao, info, contId);
} }
/** Delegates to the shared helper; returns the technical id of the plate container. */
private long getOrCreateContainer(long expId, ScreeningContainerDatasetInfo info)
{
    return ScreeningContainerDatasetInfoHelper.getOrCreateContainer(dao, info, expId);
}
/** Delegates to the shared helper; returns the technical id of the experiment. */
private long getOrCreateExperiment(ScreeningContainerDatasetInfo info)
{
    return ScreeningContainerDatasetInfoHelper.getOrCreateExperiment(dao, info);
}
} }
...@@ -143,7 +143,7 @@ public final class HCSImageCheckList ...@@ -143,7 +143,7 @@ public final class HCSImageCheckList
this.wellCol = wellCol; this.wellCol = wellCol;
this.tileRow = tileRow; this.tileRow = tileRow;
this.tileCol = tileCol; this.tileCol = tileCol;
this.channelName = channelName; this.channelName = channelName.toUpperCase();
} }
private final static String toString(final int row, final int col, final String type) private final static String toString(final int row, final int col, final String type)
......
...@@ -16,7 +16,15 @@ ...@@ -16,7 +16,15 @@
package ch.systemsx.cisd.openbis.dss.etl; package ch.systemsx.cisd.openbis.dss.etl;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import ch.systemsx.cisd.common.exceptions.UserFailureException;
import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingUploadDAO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgChannelDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgContainerDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgContainerDTO;
import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO; import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO;
...@@ -28,76 +36,291 @@ import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO; ...@@ -28,76 +36,291 @@ import ch.systemsx.cisd.openbis.dss.etl.dataaccess.ImgDatasetDTO;
*/ */
public class ScreeningContainerDatasetInfoHelper public class ScreeningContainerDatasetInfoHelper
{ {
private final ScreeningContainerDatasetInfo info;
private final IImagingUploadDAO dao; private final IImagingUploadDAO dao;
public ScreeningContainerDatasetInfoHelper(IImagingUploadDAO dao, public ScreeningContainerDatasetInfoHelper(IImagingUploadDAO dao)
ScreeningContainerDatasetInfo info)
{ {
this.dao = dao; this.dao = dao;
this.info = info;
} }
public long getOrCreateExperiment() public ExperimentAndContainerIds getOrCreateExperimentAndContainer(
{ ScreeningContainerDatasetInfo info)
return getOrCreateExperiment(dao, info);
}
public long getOrCreateContainer(long expId)
{ {
return getOrCreateContainer(dao, info, expId); return getOrCreateExperimentAndContainer(dao, info);
} }
public long getOrCreateDataset(long contId) public long createDataset(long contId, ScreeningContainerDatasetInfo info)
{ {
ImgDatasetDTO dataset = dao.tryGetDatasetByPermId(info.getDatasetPermId()); return createDataset(dao, info, contId);
if (null != dataset)
{
return dataset.getId();
} else
{
return createDataset(dao, info, contId);
}
} }
// Package-visible static methods // Package-visible static methods
static long createDataset(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info, long contId) public static long createDataset(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info,
long contId)
{ {
System.out.println("dataset perm id = " + info.getDatasetPermId());
ImgDatasetDTO dataset = ImgDatasetDTO dataset =
new ImgDatasetDTO(info.getDatasetPermId(), info.getTileRows(), info new ImgDatasetDTO(info.getDatasetPermId(), info.getTileRows(), info
.getTileColumns(), contId); .getTileColumns(), contId);
return dao.addDataset(dataset); return dao.addDataset(dataset);
} }
static long getOrCreateContainer(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info, /**
long expId) * NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
* if they don't exist is in synchronized block and uses currently opened transaction. Then the
* transaction is closed and data set is added to the DB in second transaction. If second
* transaction will be rolled back sample and experiment created in first transaction will stay
* in the DB.
*/
public static ExperimentAndContainerIds getOrCreateExperimentAndContainer(
        IImagingUploadDAO dao, ScreeningContainerDatasetInfo info)
{
    // serialize get-or-create on the DAO interface class so that two threads uploading data
    // for the same plate cannot both insert the same experiment/container row
    synchronized (IImagingUploadDAO.class)
    {
        CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
        CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, exp.getId());
        if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
        {
            // without this commit other threads will not see the new experiment/sample when the
            // synchronized block ends
            dao.commit();
        }
        return new ExperimentAndContainerIds(exp.getId(), cont.getId());
    }
}
/**
* NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
* if they don't exist is in synchronized block and uses currently opened transaction. Then the
* transaction is closed and data set is added to the DB in second transaction. If second
* transaction will be rolled back sample and experiment created in first transaction will stay
* in the DB.
*/
public static ExperimentWithChannelsAndContainer getOrCreateExperimentWithChannelsAndContainer(
        IImagingUploadDAO dao, ScreeningContainerDatasetInfo info,
        Set<HCSImageFileExtractionResult.Channel> channels)
{
    ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao);
    // serialize on the DAO interface class: concurrent uploads for the same plate would
    // otherwise race on experiment/container/channel creation
    synchronized (IImagingUploadDAO.class)
    {
        CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
        long expId = exp.getId();
        CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, expId);
        Map<String, Long/* (tech id */> channelsMap =
                helper.getOrCreateChannels(expId, channels);
        if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
        {
            // without this commit other threads will not see the new experiment/sample when the
            // synchronized block ends
            dao.commit();
        }
        return new ExperimentWithChannelsAndContainer(expId, cont.getId(), channelsMap);
    }
}
private static CreatedOrFetchedEntity getOrCreateContainer(IImagingUploadDAO dao,
ScreeningContainerDatasetInfo info, long expId)
{ {
String containerPermId = info.getContainerPermId(); String containerPermId = info.getContainerPermId();
Long containerId = dao.tryGetContainerIdPermId(containerPermId); Long containerId = dao.tryGetContainerIdPermId(containerPermId);
if (containerId != null) if (containerId != null)
{ {
return containerId; return new CreatedOrFetchedEntity(true, containerId);
} else } else
{ {
ImgContainerDTO container = ImgContainerDTO container =
new ImgContainerDTO(containerPermId, info.getContainerRows(), info new ImgContainerDTO(containerPermId, info.getContainerRows(), info
.getContainerColumns(), expId); .getContainerColumns(), expId);
return dao.addContainer(container); containerId = dao.addContainer(container);
return new CreatedOrFetchedEntity(false, containerId);
} }
} }
static long getOrCreateExperiment(IImagingUploadDAO dao, ScreeningContainerDatasetInfo info) private static CreatedOrFetchedEntity getOrCreateExperiment(IImagingUploadDAO dao,
ScreeningContainerDatasetInfo info)
{ {
String experimentPermId = info.getExperimentPermId(); String experimentPermId = info.getExperimentPermId();
Long expId = dao.tryGetExperimentIdByPermId(experimentPermId); Long expId = dao.tryGetExperimentIdByPermId(experimentPermId);
if (expId != null) if (expId != null)
{ {
return expId; return new CreatedOrFetchedEntity(true, expId);
} else
{
expId = dao.addExperiment(experimentPermId);
return new CreatedOrFetchedEntity(false, expId);
}
}
/** Pair of a technical id and a flag telling whether the entity already existed in the DB. */
private static class CreatedOrFetchedEntity
{
    // true when the row was fetched, false when it has just been created
    private final boolean alreadyExisted;

    private final long id;

    public CreatedOrFetchedEntity(boolean alreadyExisted, long id)
    {
        this.alreadyExisted = alreadyExisted;
        this.id = id;
    }

    public boolean hasAlreadyExisted()
    {
        return alreadyExisted;
    }

    public long getId()
    {
        return id;
    }
}
/** Immutable value object carrying the technical ids of an experiment and its container. */
public static class ExperimentAndContainerIds
{
    private final long experimentId;

    private final long containerId;

    public ExperimentAndContainerIds(long experimentId, long containerId)
    {
        this.experimentId = experimentId;
        this.containerId = containerId;
    }

    public long getExperimentId()
    {
        return experimentId;
    }

    public long getContainerId()
    {
        return containerId;
    }
}
/** Extends ExperimentAndContainerIds with a map from channel name to channel technical id. */
public static class ExperimentWithChannelsAndContainer extends ExperimentAndContainerIds
{
    private final Map<String, Long/* (tech id */> channelsMap;

    public ExperimentWithChannelsAndContainer(long experimentId, long containerId,
            Map<String, Long> channelsMap)
    {
        super(experimentId, containerId);
        this.channelsMap = channelsMap;
    }

    public Map<String, Long> getChannelsMap()
    {
        return channelsMap;
    }
}
// ------ channels creation ------------------------------
private Map<String, Long> getOrCreateChannels(long expId,
Set<HCSImageFileExtractionResult.Channel> channels)
{
List<ImgChannelDTO> allChannels = dao.getChannelsByExperimentId(expId);
if (allChannels.size() == 0)
{
return createChannels(expId, channels);
} else } else
{ {
return dao.addExperiment(experimentPermId); return updateChannels(expId, channels, allChannels);
}
}
private Map<String, Long> updateChannels(long expId, Set<Channel> channels,
List<ImgChannelDTO> allChannels)
{
Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels);
Map<String, Long> map = new HashMap<String, Long>();
for (HCSImageFileExtractionResult.Channel channel : channels)
{
ImgChannelDTO channelDTO = updateChannel(channel, expId, existingChannels);
addChannel(map, channelDTO);
} }
return map;
}
/** Creates rows for all given channels; returns a map from channel name to technical id. */
private Map<String, Long> createChannels(long expId, Set<Channel> channels)
{
    Map<String, Long> map = new HashMap<String, Long>();
    for (HCSImageFileExtractionResult.Channel channel : channels)
    {
        ImgChannelDTO channelDTO = createChannel(expId, channel);
        addChannel(map, channelDTO);
    }
    return map;
}
/** Registers <var>channelDTO</var> in <var>map</var>, keyed by its channel name. */
private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO)
{
    map.put(channelDTO.getName(), channelDTO.getId());
}
/** Builds a lookup of the given channels keyed by upper-cased channel name. */
private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels)
{
    Map<String, ImgChannelDTO> nameMap = new HashMap<String, ImgChannelDTO>();
    for (ImgChannelDTO channel : channels)
    {
        nameMap.put(channel.getName().toUpperCase(), channel);
    }
    return nameMap;
}
/**
 * Updates the database row of the experiment channel matching <var>channel</var> by name.
 * The wavelength is inherited from the existing row when unspecified; two specified,
 * differing wavelengths cause a failure.
 *
 * @return the updated DTO carrying the technical id of the existing row
 * @throws UserFailureException on unknown channel name or wavelength conflict
 */
private ImgChannelDTO updateChannel(HCSImageFileExtractionResult.Channel channel, long expId,
        Map<String, ImgChannelDTO> existingChannels)
{
    ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
    String channelName = channelDTO.getName();
    ImgChannelDTO existingChannel = existingChannels.get(channelName);
    if (existingChannel == null)
    {
        throw createInvalidNewChannelException(expId, existingChannels, channelName);
    }
    // a channel with a specified name already exists for an experiment, its description
    // will be updated. Wavelength will be updated only if it was null before.
    if (channelDTO.getWavelength() == null)
    {
        channelDTO.setWavelength(existingChannel.getWavelength());
    }
    if (existingChannel.getWavelength() != null
            && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false)
    {
        throw UserFailureException.fromTemplate(
                "There are already datasets registered for the experiment "
                        + "which use the same channel name, but with a different wavelength! "
                        + "Channel %s, old wavelength %d, new wavelength %d.", channelName,
                existingChannel.getWavelength(), channelDTO.getWavelength());
    }
    channelDTO.setId(existingChannel.getId());
    dao.updateChannel(channelDTO);
    return channelDTO;
}
/** Builds the failure raised when a channel name is unknown for an existing experiment. */
private static UserFailureException createInvalidNewChannelException(long expId,
        Map<String, ImgChannelDTO> existingChannels, String channelName)
{
    return UserFailureException.fromTemplate(
            "Experiment with id '%d' has already some channels registered "
                    + "and does not have a channel with a name '%s'. "
                    + "Register a new experiment to use new channels. "
                    + "Available channel names in this experiment: %s.", expId, channelName,
            existingChannels.keySet());
}
/** Adds a new channel row for experiment <var>expId</var>; returns the DTO with its new id. */
private ImgChannelDTO createChannel(long expId, HCSImageFileExtractionResult.Channel channel)
{
    ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
    long channelId = dao.addChannel(channelDTO);
    channelDTO.setId(channelId);
    return channelDTO;
}
/** Converts <var>channel</var> into an experiment-scoped DTO; the name is upper-cased. */
private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel,
        long expId)
{
    return ImgChannelDTO.createExperimentChannel(channel.getName().toUpperCase(), channel
            .tryGetDescription(), channel.tryGetWavelength(), expId);
}
} }
...@@ -43,11 +43,9 @@ public class FeatureVectorUploader ...@@ -43,11 +43,9 @@ public class FeatureVectorUploader
public void uploadFeatureVectors(List<CanonicalFeatureVector> fvecs) public void uploadFeatureVectors(List<CanonicalFeatureVector> fvecs)
{ {
ScreeningContainerDatasetInfoHelper helper = ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao);
new ScreeningContainerDatasetInfoHelper(dao, info); long contId = helper.getOrCreateExperimentAndContainer(info).getContainerId();
long expId = helper.getOrCreateExperiment(); long dataSetId = helper.createDataset(contId, info);
long contId = helper.getOrCreateContainer(expId);
long dataSetId = helper.getOrCreateDataset(contId);
for (CanonicalFeatureVector fvec : fvecs) for (CanonicalFeatureVector fvec : fvecs)
{ {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment