diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreening.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreening.java index bb7d43982d7e0a13a46bc5e049920a04dceb06c4..37d136ffbb28f17cc91dd88f1f834262ee4dec93 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreening.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreening.java @@ -165,18 +165,16 @@ public class DssServiceRpcScreening extends AbstractDssServiceRpc<IDssServiceRpc public List<String> listAvailableFeatureCodes(String sessionToken, List<? extends IFeatureVectorDatasetIdentifier> featureDatasets) { + List<ImgFeatureDefDTO> featureDefinitions = getFeatureDefinitions(featureDatasets); + + // add only new feature names List<String> result = new ArrayList<String>(); // keep the order - for (IFeatureVectorDatasetIdentifier identifier : featureDatasets) + for (ImgFeatureDefDTO featureDefinition : featureDefinitions) { - // add only new feature names - List<ImgFeatureDefDTO> featureDefinitions = getFeatureDefinitions(identifier); - for (ImgFeatureDefDTO featureDefinition : featureDefinitions) + String featureCode = featureDefinition.getCode(); + if (result.contains(featureCode) == false) { - String featureCode = featureDefinition.getCode(); - if (result.contains(featureCode) == false) - { - result.add(featureCode); - } + result.add(featureCode); } } return result; @@ -771,6 +769,42 @@ public class DssServiceRpcScreening extends AbstractDssServiceRpc<IDssServiceRpc } } + private List<ImgDatasetDTO> getDatasets(List<? extends IDatasetIdentifier> datasetIdents) + { + String[] permIds = extractPermIds(datasetIdents); + List<ImgDatasetDTO> datasets = getDAO().listDatasetsByPermId(permIds); + if (datasets.size() != datasetIdents.size()) + { + Set<String> missing = new HashSet<String>(Arrays.asList(permIds)); + for (ImgDatasetDTO dataset : datasets) + { + missing.remove(dataset.getPermId()); + } + throw new UserFailureException("Following datasets could not be found: " + missing); + } + return datasets; + } + + private long[] extractIds(List<ImgDatasetDTO> dataSets) + { + long[] ids = new long[dataSets.size()]; + for (int i = 0; i < ids.length; i++) + { + ids[i] = dataSets.get(i).getId(); + } + return ids; + } + + private static String[] extractPermIds(List<? extends IDatasetIdentifier> datasets) + { + String[] permIds = new String[datasets.size()]; + for (int i = 0; i < permIds.length; i++) + { + permIds[i] = datasets.get(i).getDatasetCode(); + } + return permIds; + } + private ImgDatasetDTO getImagingDataset(IDatasetIdentifier datasetIdentifier) { ImgDatasetDTO dataset = getDAO().tryGetDatasetByPermId(datasetIdentifier.getDatasetCode()); @@ -1001,10 +1035,11 @@ public class DssServiceRpcScreening extends AbstractDssServiceRpc<IDssServiceRpc .matches(ScreeningConstants.ANY_MICROSCOPY_IMAGE_DATASET_TYPE_PATTERN); } - private List<ImgFeatureDefDTO> getFeatureDefinitions(IDatasetIdentifier identifier) + private List<ImgFeatureDefDTO> getFeatureDefinitions( + List<? 
extends IDatasetIdentifier> featureDatasets) { - ImgDatasetDTO dataSet = getImagingDataset(identifier); - return getDAO().listFeatureDefsByDataSetId(dataSet.getId()); + List<ImgDatasetDTO> dataSets = getDatasets(featureDatasets); + return getDAO().listFeatureDefsByDataSetIds(extractIds(dataSets)); } private IImagingReadonlyQueryDAO getDAO() diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/PlateDatasetLoader.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/PlateDatasetLoader.java index fbd9ef33486872402048480b64556514f6a1dc9e..267072a7d3c0d0b7b7f470cd89ca3d481644d695 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/PlateDatasetLoader.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/PlateDatasetLoader.java @@ -115,7 +115,7 @@ class PlateDatasetLoader private void loadDatasets() { - List<Long> sampleIds = extractSampleIds(); + Set<Long> sampleIds = extractSampleIds(); IDatasetLister datasetLister = businessObjectFactory.createDatasetLister(session); datasets = datasetLister.listBySampleIds(sampleIds); datasets = @@ -227,7 +227,7 @@ class PlateDatasetLoader throw new UserFailureException("Sample '" + sample.getIdentifier() + "' has no property " + ScreeningConstants.PLATE_GEOMETRY); } - + protected Map<String, String> extractProperties(ExternalData dataSet) { final Map<String, String> properties = new HashMap<String, String>(); @@ -289,14 +289,9 @@ class PlateDatasetLoader return sampleCodesArray; } - private List<Long> extractSampleIds() + private Set<Long> extractSampleIds() { - ArrayList<Long> sampleIds = new ArrayList<Long>(samples.size()); - for (Sample sample : samples) - { - sampleIds.add(sample.getId()); - } - return sampleIds; + return samplesById.keySet(); } protected static String getDataStoreUrlFromDataStore(DataStore dataStore) diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoader.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoader.java index 740eec7969eef4c8fd93fcddee6c87bbd4a6059a..5cd5ea43f46e8d790a94ab8fd3d6ed8fe61d5046 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoader.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoader.java @@ -110,7 +110,8 @@ public class WellContentLoader IScreeningBusinessObjectFactory businessObjectFactory, IDAOFactory daoFactory, TechId geneMaterialId) { - final WellContentLoader loader = new WellContentLoader(session, businessObjectFactory, daoFactory); + final WellContentLoader loader = + new WellContentLoader(session, businessObjectFactory, daoFactory); return loader.loadLocations(geneMaterialId); } @@ -123,12 +124,18 @@ public class WellContentLoader IScreeningBusinessObjectFactory businessObjectFactory, IDAOFactory daoFactory, WellSearchCriteria materialCriteria) { + long start = System.currentTimeMillis(); WellContentLoader loader = new WellContentLoader(session, businessObjectFactory, daoFactory); - List<WellContent> locations = loader.loadLocations(materialCriteria); + + operationLog.info(String.format("[%d msec] Load %d locations.", + (System.currentTimeMillis() - start), locations.size())); + List<WellContent> withPropsAndDataSets = loader.enrichWithDatasets(locations); - return loader.enrichWithFeatureVectors(withPropsAndDataSets); + List<WellContent> withFeatureVectors = + 
loader.enrichWithFeatureVectors(withPropsAndDataSets); + return withFeatureVectors; } /** @@ -189,14 +196,24 @@ public class WellContentLoader private List<WellContent> enrichWithDatasets(List<WellContent> locations) { + long start = System.currentTimeMillis(); + Collection<PlateIdentifier> plates = extractPlates(locations); FeatureVectorDatasetLoader datasetsRetriever = new FeatureVectorDatasetLoader(session, businessObjectFactory, null, plates); Collection<ExternalData> imageDatasets = datasetsRetriever.getImageDatasets(); - Map<String, ImageDatasetParameters> imageParams = loadImagesReport(imageDatasets); - Collection<ExternalData> featureVectorDatasets = datasetsRetriever.getFeatureVectorDatasets(); + + operationLog.info(String.format("[%d msec] load datasets (%d image, %d fv).", + (System.currentTimeMillis() - start), imageDatasets.size(), + featureVectorDatasets.size())); + start = System.currentTimeMillis(); + + Map<String, ImageDatasetParameters> imageParams = loadImagesReport(imageDatasets); + operationLog.info(String.format("[%d msec] loadImagesReport", + (System.currentTimeMillis() - start))); + Collection<ExternalData> childlessImageDatasets = selectChildlessImageDatasets(imageDatasets, featureVectorDatasets); @@ -557,6 +574,7 @@ public class WellContentLoader return map; } + // TODO 2011-04-04, Tomasz Pylak: inefficient, rewrite to use single queryfor all datasets private Map<String/* dataset code */, ImageDatasetParameters> loadImagesReport( Collection<ExternalData> imageDatasets) { @@ -581,7 +599,7 @@ public class WellContentLoader private List<WellContent> loadLocations(WellSearchCriteria materialCriteria) { - DataIterator<WellContentQueryResult> locations; + Iterable<WellContentQueryResult> locations; MaterialSearchCriteria materialSearchCriteria = materialCriteria.getMaterialSearchCriteria(); ExperimentSearchCriteria experiment = materialCriteria.getExperimentCriteria(); @@ -592,7 +610,15 @@ public class WellContentLoader materialSearchCriteria.tryGetMaterialCodesOrProperties(); Long expId = tryGetExperimentId(experiment); + long start = System.currentTimeMillis(); long[] materialIds = findMaterialIds(codesCriteria); + + operationLog.info(String.format( + "[%d msec] Finding %d materials for criteria '%s'. 
Result: %s", + (System.currentTimeMillis() - start), materialIds.length, codesCriteria, + Arrays.toString(materialIds))); + start = System.currentTimeMillis(); + if (expId == null) { locations = @@ -705,22 +731,18 @@ public class WellContentLoader return convert(locations); } - private List<WellContent> convert(DataIterator<WellContentQueryResult> queryResults) + private List<WellContent> convert(Iterable<WellContentQueryResult> queryResults) { - List<WellContentQueryResult> uniqueResults = removeDuplicateWells(queryResults); + List<WellContent> wellContents = removeDuplicateWells(queryResults); - ArrayList<WellContent> wellContents = new ArrayList<WellContent>(); - for (WellContentQueryResult uniqueWell : uniqueResults) - { - wellContents.add(convert(uniqueWell)); - } List<WellContent> withProperties = enrichWithWellProperties(wellContents); IMaterialLister materialLister = businessObjectFactory.createMaterialLister(session); List<Material> containedMaterials = getMaterialsWithDuplicates(withProperties); materialLister.enrichWithProperties(containedMaterials); + return wellContents; } - + private static Set<Material> extractMaterials(List<WellContent> locations) { Set<Material> materials = new HashSet<Material>(); @@ -746,11 +768,10 @@ public class WellContentLoader return materials; } - private List<WellContentQueryResult> removeDuplicateWells( - DataIterator<WellContentQueryResult> queryResults) + private List<WellContent> removeDuplicateWells(Iterable<WellContentQueryResult> queryResults) { + List<WellContent> wellContents = new ArrayList<WellContent>(); Set<String> seenWellPermIds = new HashSet<String>(); - List<WellContentQueryResult> result = new ArrayList<WellContentQueryResult>(); for (WellContentQueryResult queryResult : queryResults) { @@ -758,10 +779,10 @@ public class WellContentLoader if (false == seenWellPermIds.contains(permId)) { seenWellPermIds.add(permId); - result.add(queryResult); + wellContents.add(convert(queryResult)); } } - return result; + return wellContents; } private static WellContent convert(WellContentQueryResult well) @@ -769,11 +790,11 @@ public class WellContentLoader WellLocation location = ScreeningUtils.tryCreateLocationFromMatrixCoordinate(well.well_code); EntityReference wellReference = - new EntityReference(well.well_id, well.well_code, - well.well_type_code, EntityKind.SAMPLE, well.well_perm_id); + new EntityReference(well.well_id, well.well_code, well.well_type_code, + EntityKind.SAMPLE, well.well_perm_id); EntityReference plate = - new EntityReference(well.plate_id, well.plate_code, - well.plate_type_code, EntityKind.SAMPLE, well.plate_perm_id); + new EntityReference(well.plate_id, well.plate_code, well.plate_type_code, + EntityKind.SAMPLE, well.plate_perm_id); return new WellContent(location, wellReference, plate, convertExperiment(well)); } diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoader.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoader.java index ef5d0aed6902ca71bbce0a60e1fa148969d2de61..e197893a4db84bafba46695221ac156fba80e4d5 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoader.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoader.java @@ -19,6 +19,7 @@ package ch.systemsx.cisd.openbis.plugin.screening.shared.imaging; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import 
java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -30,6 +31,9 @@ import java.util.Set; import org.apache.log4j.Logger; +import ch.systemsx.cisd.common.collections.GroupByMap; +import ch.systemsx.cisd.common.collections.IKeyExtractor; +import ch.systemsx.cisd.common.collections.TableMap; import ch.systemsx.cisd.common.exceptions.UserFailureException; import ch.systemsx.cisd.common.logging.LogCategory; import ch.systemsx.cisd.common.logging.LogFactory; @@ -43,6 +47,7 @@ import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.WellFeatureVec import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.WellLocation; import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.FeatureTableRow; import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateFeatureValues; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.AbstractImgIdentifiable; import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.IImagingReadonlyQueryDAO; import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgContainerDTO; import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO; @@ -71,6 +76,8 @@ public class FeatureVectorLoader private Map<ImgFeatureDefDTO, List<ImgFeatureValuesDTO>> featureDefToValuesMap; private FeatureVocabularyTermsMap featureDefToVocabularyTerms; + + private ImgContainerDTO container; } public static interface IMetadataProvider @@ -126,7 +133,7 @@ public class FeatureVectorLoader throw new UserFailureException("Unkown data set " + dataSetCode); } - final List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetId(dataSet.getId()); + final List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetIds(dataSet.getId()); final List<CodeAndLabel> result = new ArrayList<CodeAndLabel>(); for (ImgFeatureDefDTO featureDef : featureDefs) { @@ -165,7 +172,7 @@ public class FeatureVectorLoader } /** - * fetches specified features of all wells + * Fetches specified features of all wells. * * @param featureCodes empty list means no filtering. * @param metadataProviderOrNull if null plate identifiers in FeatureTableRow are not set @@ -243,24 +250,22 @@ public class FeatureVectorLoader } } - private void addFeatureVectorsOfDataSetsOrDie(Collection<String> datasetCodes) + private void addFeatureVectorsOfDataSetsIfPossible(Collection<String> datasetCodes) { - for (String datasetCode : datasetCodes) + final List<ImgDatasetDTO> dataSets = listDatasetsByPermId(datasetCodes); + if (dataSets.size() != datasetCodes.size()) { - addFeatureVectorsOfDataSetOrDie(datasetCode); + operationLog.warn(createUnknownDatasetMessage(datasetCodes, dataSets)); } + addFeatureVectorsOfDataSets(dataSets); } - private void addFeatureVectorsOfDataSetsIfPossible(Collection<String> datasetCodes) + private String createUnknownDatasetMessage(Collection<String> requestedDatasetCodes, + final List<ImgDatasetDTO> existingDataSets) { - for (String datasetCode : datasetCodes) - { - if (addFeatureVectorsOfDataSet(datasetCode) == false) - { - operationLog.warn("Dataset " + datasetCode - + " contains no feature vectors or does not exist."); - } - } + return String.format( + "Some of the datasets are unknown! Requested datasets: %s. 
Found datasets: %s.", + requestedDatasetCodes, existingDataSets); } private static Set<String> extractDatasetCodesFromSimpleReferences( @@ -308,42 +313,56 @@ public class FeatureVectorLoader * @throws UserFailureException if dataset with the specified code contains no feature vectors * or does not exist. */ - void addFeatureVectorsOfDataSetOrDie(String dataSetCode) + void addFeatureVectorsOfDataSetsOrDie(Collection<String> datasetCodes) { - boolean added = addFeatureVectorsOfDataSet(dataSetCode); - if (added == false) + final List<ImgDatasetDTO> dataSets = listDatasetsByPermId(datasetCodes); + if (dataSets.size() != datasetCodes.size()) { - throw new UserFailureException("Unkown data set " + dataSetCode); + throw new UserFailureException(createUnknownDatasetMessage(datasetCodes, dataSets)); } + addFeatureVectorsOfDataSets(dataSets); + } + + private List<ImgDatasetDTO> listDatasetsByPermId(Collection<String> datasetCodes) + { + return dao.listDatasetsByPermId(datasetCodes.toArray(new String[0])); } /** - * Adds feature vectors for specified feature vector data set code. - * - * @return false if dataset with the specified code contains no feature vectors or does not - * exist.<br> - * true if feature vectors have been added. + * Adds feature vectors for specified feature vector data sets codes. */ - boolean addFeatureVectorsOfDataSet(String dataSetCode) + void addFeatureVectorsOfDataSets(List<ImgDatasetDTO> datasets) { - final ImgDatasetDTO dataSet = dao.tryGetDatasetByPermId(dataSetCode); - if (dataSet == null) + long start = System.currentTimeMillis(); + DatasetFeatureDefinitionCachedLister lister = + new DatasetFeatureDefinitionCachedLister(datasets, featureCodes, useAllFeatures, + dao); + for (ImgDatasetDTO dataset : datasets) { - return false; - } - final DatasetFeaturesBundle bundle = new DatasetFeaturesBundle(); - final Map<String, ImgFeatureDefDTO> featureCodeToDefMap = - createFeatureCodeToDefMap(dao, dataSet); - bundle.dataSet = dataSet; - bundle.featureDefToValuesMap = new HashMap<ImgFeatureDefDTO, List<ImgFeatureValuesDTO>>(); - bundle.featureDefToVocabularyTerms = - createFeatureIdToVocabularyTermsMap(dao, dataSet, - bundle.featureDefToValuesMap.keySet()); - bundles.add(bundle); - if (useAllFeatures) - { - featureCodes.addAll(featureCodeToDefMap.keySet()); + Map<String, ImgFeatureDefDTO> featureCodeToDefMap = + lister.getFeatureCodeToDefMap(dataset); + + if (useAllFeatures) + { + featureCodes.addAll(featureCodeToDefMap.keySet()); + } + assignIndicesToFeatures(featureCodeToDefMap); + + DatasetFeaturesBundle bundle = new DatasetFeaturesBundle(); + bundle.featureDefToValuesMap = lister.getFeatureValues(dataset); + bundle.dataSet = dataset; + bundle.container = lister.getContainer(dataset); + bundle.featureDefToVocabularyTerms = + createFeatureIdToVocabularyTermsMap(dataset, + bundle.featureDefToValuesMap.keySet(), lister); + bundles.add(bundle); } + operationLog.info(String.format("[%d msec] Fetching %d features from %d datasets.", + (System.currentTimeMillis() - start), featureCodes.size(), datasets.size())); + } + + private void assignIndicesToFeatures(final Map<String, ImgFeatureDefDTO> featureCodeToDefMap) + { for (String featureCode : featureCodes) { final ImgFeatureDefDTO featureDefinition = featureCodeToDefMap.get(featureCode); @@ -355,42 +374,213 @@ public class FeatureVectorLoader featureCodeLabelToIndexMap.put(codeAndLabel, new Integer( featureCodeLabelToIndexMap.size())); } - List<ImgFeatureValuesDTO> featureValueSets = - dao.getFeatureValues(featureDefinition); - if 
(featureValueSets.isEmpty()) + } + } + } + + /** + * Helper class which fetches all the results from the database at the beginning with one query, + * groups them and serves them from the cache. + */ + private static class DatasetFeatureDefinitionCachedLister + { + private final GroupByMap<Long/* dataset id */, ImgFeatureDefDTO> requestedFeatureDefinitionsMap; + + private final GroupByMap<Long/* dataset id */, ImgFeatureVocabularyTermDTO> featureVocabularyTermsMap; + + // values for all datasets and requested features + private final GroupByMap</* feature def id */Long, ImgFeatureValuesDTO> featureValuesMap; + + private final TableMap<Long/* container id */, ImgContainerDTO> containersByIdMap; + + /** + * @datasets datasets in which we are interested + * @param featureCodes codes of features for which we want to fetch the values + * @param useAllFeatures if true the featureCodes param is ignored and values are fetched + * for all features + */ + public DatasetFeatureDefinitionCachedLister(List<ImgDatasetDTO> datasets, + Set<String> featureCodes, boolean useAllFeatures, IImagingReadonlyQueryDAO dao) + { + this.containersByIdMap = createContainerByIdMap(datasets, dao); + + long[] datasetIds = extractIds(datasets); + List<ImgFeatureDefDTO> requestedFeatureDefinitions = + listRequestedFeatureDefinitions(datasetIds, featureCodes, useAllFeatures, dao); + this.requestedFeatureDefinitionsMap = + GroupByMap.create(requestedFeatureDefinitions, + new IKeyExtractor<Long, ImgFeatureDefDTO>() + { + public Long getKey(ImgFeatureDefDTO featureDef) + { + return featureDef.getDataSetId(); + } + }); + + List<ImgFeatureVocabularyTermDTO> featureVocabularyTerms = + dao.listFeatureVocabularyTermsByDataSetId(datasetIds); + this.featureVocabularyTermsMap = + GroupByMap.create(featureVocabularyTerms, + new IKeyExtractor<Long, ImgFeatureVocabularyTermDTO>() + { + public Long getKey( + ImgFeatureVocabularyTermDTO featureVocabularyTerm) + { + return featureVocabularyTerm.getDataSetId(); + } + }); + + List<ImgFeatureValuesDTO> requestedFeatureValues = + dao.getFeatureValues(extractIds(requestedFeatureDefinitions)); + this.featureValuesMap = + GroupByMap.create(requestedFeatureValues, + new IKeyExtractor<Long, ImgFeatureValuesDTO>() + { + public Long getKey(ImgFeatureValuesDTO featureVal) + { + return featureVal.getFeatureDefId(); + } + }); + } + + private static TableMap<Long, ImgContainerDTO> createContainerByIdMap( + List<ImgDatasetDTO> datasets, IImagingReadonlyQueryDAO dao) + { + List<ImgContainerDTO> containers = + dao.listContainersByIds(extractContainerIds(datasets)); + return new TableMap<Long, ImgContainerDTO>(containers, + new IKeyExtractor<Long, ImgContainerDTO>() + { + public Long getKey(ImgContainerDTO container) + { + return container.getId(); + } + }); + } + + private List<ImgFeatureDefDTO> listRequestedFeatureDefinitions(long[] datasetIds, + Set<String> featureCodes, boolean useAllFeatures, IImagingReadonlyQueryDAO dao) + { + List<ImgFeatureDefDTO> allFeatureDefinitions = + dao.listFeatureDefsByDataSetIds(datasetIds); + List<ImgFeatureDefDTO> requestedFeatureDefinitions = + extractRequestedFeatureDefinitions(featureCodes, useAllFeatures, + allFeatureDefinitions); + return requestedFeatureDefinitions; + } + + public ImgContainerDTO getContainer(ImgDatasetDTO dataset) + { + return containersByIdMap.getOrDie(dataset.getContainerId()); + } + + public Map<String, ImgFeatureDefDTO> getFeatureCodeToDefMap(ImgDatasetDTO dataset) + { + List<ImgFeatureDefDTO> featureDefinitions = 
getRequestedFeatureDefinitions(dataset); + return createCodeToDefMap(featureDefinitions); + } + + private List<ImgFeatureDefDTO> getRequestedFeatureDefinitions(ImgDatasetDTO dataset) + { + return requestedFeatureDefinitionsMap.getOrDie(dataset.getId()); + } + + public List<ImgFeatureVocabularyTermDTO> getFeatureVocabularyTerms(ImgDatasetDTO dataSet) + { + List<ImgFeatureVocabularyTermDTO> terms = + featureVocabularyTermsMap.tryGet(dataSet.getId()); + if (terms == null) + { + return Collections.emptyList(); + } + return terms; + } + + public Map<ImgFeatureDefDTO, List<ImgFeatureValuesDTO>> getFeatureValues( + ImgDatasetDTO dataset) + { + List<ImgFeatureDefDTO> datasetFeatureDefinitions = + getRequestedFeatureDefinitions(dataset); + Map<ImgFeatureDefDTO, List<ImgFeatureValuesDTO>> defToValuesMap = + new HashMap<ImgFeatureDefDTO, List<ImgFeatureValuesDTO>>(); + for (ImgFeatureDefDTO featureDef : datasetFeatureDefinitions) + { + List<ImgFeatureValuesDTO> values = featureValuesMap.getOrDie(featureDef.getId()); + defToValuesMap.put(featureDef, values); + } + return defToValuesMap; + } + + private static List<ImgFeatureDefDTO> extractRequestedFeatureDefinitions( + Set<String> featureCodes, boolean useAllFeatures, + List<ImgFeatureDefDTO> allFeatureDefinitions) + { + if (useAllFeatures) + { + return allFeatureDefinitions; + } else + { + return filterByCode(allFeatureDefinitions, featureCodes); + } + } + + private static List<ImgFeatureDefDTO> filterByCode( + List<ImgFeatureDefDTO> allFeatureDefinitions, Set<String> featureCodes) + { + List<ImgFeatureDefDTO> result = new ArrayList<ImgFeatureDefDTO>(); + for (ImgFeatureDefDTO featureDef : allFeatureDefinitions) + { + if (featureCodes.contains(featureDef.getCode())) { - throw new UserFailureException("At least one set of values for feature " - + featureCode + " of data set " + dataSetCode + " expected."); + result.add(featureDef); } - bundle.featureDefToValuesMap.put(featureDefinition, featureValueSets); } + return result; + } + + private static long[] extractContainerIds(List<ImgDatasetDTO> datasets) + { + long[] ids = new long[datasets.size()]; + int i = 0; + for (ImgDatasetDTO dataset : datasets) + { + ids[i++] = dataset.getContainerId(); + } + return ids; + } + + private static long[] extractIds(List<? 
extends AbstractImgIdentifiable> identifiables) + { + long[] ids = new long[identifiables.size()]; + int i = 0; + for (AbstractImgIdentifiable identifiable : identifiables) + { + ids[i++] = identifiable.getId(); + } + return ids; + } + + private static Map<String, ImgFeatureDefDTO> createCodeToDefMap( + final List<ImgFeatureDefDTO> featureDefinitions) + { + final Map<String, ImgFeatureDefDTO> featureCodeToDefMap = + new LinkedHashMap<String, ImgFeatureDefDTO>(); + for (ImgFeatureDefDTO def : featureDefinitions) + { + featureCodeToDefMap.put(def.getCode(), def); + } + return featureCodeToDefMap; } - return true; } private static FeatureVocabularyTermsMap createFeatureIdToVocabularyTermsMap( - IImagingReadonlyQueryDAO dao, ImgDatasetDTO dataSet, - Set<ImgFeatureDefDTO> datasetFeatureDefs) + ImgDatasetDTO dataSet, Set<ImgFeatureDefDTO> datasetFeatureDefs, + DatasetFeatureDefinitionCachedLister lister) { - List<ImgFeatureVocabularyTermDTO> allTerms = - dao.listFeatureVocabularyTermsByDataSetId(dataSet.getId()); + List<ImgFeatureVocabularyTermDTO> allTerms = lister.getFeatureVocabularyTerms(dataSet); return FeatureVocabularyTermsMap.createVocabularyTermsMap(allTerms, datasetFeatureDefs); } - private static Map<String, ImgFeatureDefDTO> createFeatureCodeToDefMap( - IImagingReadonlyQueryDAO dao, final ImgDatasetDTO dataSet) - { - final List<ImgFeatureDefDTO> featureDefinitions = - dao.listFeatureDefsByDataSetId(dataSet.getId()); - final Map<String, ImgFeatureDefDTO> featureCodeToDefMap = - new LinkedHashMap<String, ImgFeatureDefDTO>(); - for (ImgFeatureDefDTO def : featureDefinitions) - { - featureCodeToDefMap.put(def.getCode(), def); - } - return featureCodeToDefMap; - } - /** * Returns all feature codes/labels found. If the feature code list in the constructor is not * empty the result will a list where the codes are a subset of this list. 
@@ -409,7 +599,7 @@ public class FeatureVectorLoader List<FeatureTableRow> rows = new ArrayList<FeatureTableRow>(); for (DatasetFeaturesBundle bundle : bundles) { - ImgContainerDTO container = dao.getContainerById(bundle.dataSet.getContainerId()); + ImgContainerDTO container = bundle.container; SampleIdentifier identifier = tryGetSampleIdentifier(container); for (int rowIndex = 1; rowIndex <= container.getNumberOfRows(); rowIndex++) { @@ -471,7 +661,7 @@ public class FeatureVectorLoader { String dataSetCode = reference.getDatasetCode(); DatasetFeaturesBundle bundle = getDatasetFeaturesBundleOrDie(bundleMap, dataSetCode); - ImgContainerDTO container = dao.getContainerById(bundle.dataSet.getContainerId()); + ImgContainerDTO container = bundle.container; SampleIdentifier identifier = tryGetSampleIdentifier(container); final FeatureTableRow row = createFeatureTableRow(bundle, identifier, reference, diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java index 8e33b48e0187952cfac862318de230ca7fb53624..0f5fba7ee47f7877b185139eaffbfd9e122a8ac7 100644 --- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java +++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java @@ -22,6 +22,8 @@ import net.lemnik.eodsql.BaseQuery; import net.lemnik.eodsql.Select; import ch.systemsx.cisd.bds.hcs.Location; +import ch.systemsx.cisd.openbis.generic.server.dataaccess.db.LongArrayMapper; +import ch.systemsx.cisd.openbis.generic.server.dataaccess.db.StringArrayMapper; /** * Operations on imaging database which are read-only.<br> @@ -126,7 +128,7 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery * information about the spot to which it belongs. */ @Select("select i.*, ai.image_transformer_factory as image_transformer_factory, " - +"s.id as spot_id, ai.id as acquired_image_id " + + "s.id as spot_id, ai.id as acquired_image_id " + " from data_sets d " + " join channel_stacks cs on cs.ds_id = d.id " + " join spots s on cs.spot_id = s.id " @@ -215,6 +217,10 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery @Select("select * from DATA_SETS where PERM_ID = ?{1}") public ImgDatasetDTO tryGetDatasetByPermId(String datasetPermId); + @Select(sql = "select * from DATA_SETS where PERM_ID = any(?{1})", parameterBindings = + { StringArrayMapper.class }, fetchSize = FETCH_SIZE) + public List<ImgDatasetDTO> listDatasetsByPermId(String... datasetPermIds); + // ---------------- HCS - experiments, containers, channels --------------------------------- @Select("select * from EXPERIMENTS where PERM_ID = ?{1}") @@ -229,6 +235,10 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery @Select("select * from CONTAINERS where ID = ?{1}") public ImgContainerDTO getContainerById(long containerId); + @Select(sql = "select * from CONTAINERS where ID = any(?{1})", parameterBindings = + { LongArrayMapper.class }, fetchSize = FETCH_SIZE) + public List<ImgContainerDTO> listContainersByIds(long... 
containerIds); + // join with container is needed to use spots index @Select("select cs.* from CHANNEL_STACKS cs " + "join SPOTS s on s.id = cs.spot_id " @@ -252,15 +262,19 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery // ---------------- HCS - feature vectors --------------------------------- - @Select("select * from FEATURE_DEFS where DS_ID = ?{1}") - public List<ImgFeatureDefDTO> listFeatureDefsByDataSetId(long dataSetId); + @Select(sql = "select * from FEATURE_DEFS where DS_ID = any(?{1})", parameterBindings = + { LongArrayMapper.class }, fetchSize = FETCH_SIZE) + public List<ImgFeatureDefDTO> listFeatureDefsByDataSetIds(long... dataSetIds); - @Select("select t.* from FEATURE_VOCABULARY_TERMS t " - + "join FEATURE_DEFS fd on fd.id = t.fd_id " - + "where fd.DS_ID = ?{1} ") - public List<ImgFeatureVocabularyTermDTO> listFeatureVocabularyTermsByDataSetId(long dataSetId); + @Select(sql = "select t.*, fd.ds_id as DS_ID from FEATURE_VOCABULARY_TERMS t " + + "join FEATURE_DEFS fd on fd.id = t.fd_id " + + "where fd.DS_ID = any(?{1}) ", parameterBindings = + { LongArrayMapper.class }, fetchSize = FETCH_SIZE) + public List<ImgFeatureVocabularyTermDTO> listFeatureVocabularyTermsByDataSetId( + long... dataSetIds); - @Select(sql = "select * from FEATURE_VALUES where FD_ID = ?{1.id} order by T_in_SEC, Z_in_M", resultSetBinding = FeatureVectorDataObjectBinding.class) - public List<ImgFeatureValuesDTO> getFeatureValues(ImgFeatureDefDTO featureDef); + @Select(sql = "select * from FEATURE_VALUES where FD_ID = any(?{1}) order by T_in_SEC, Z_in_M", parameterBindings = + { LongArrayMapper.class }, fetchSize = FETCH_SIZE, resultSetBinding = FeatureVectorDataObjectBinding.class) + public List<ImgFeatureValuesDTO> getFeatureValues(long... featureDefIds); } diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/FeatureVectorDAOTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/FeatureVectorDAOTest.java index e456bfb5695e413edf2cf50acb95db1d5f19bf65..09dc14cf150a7f68f9424fb1d7dc54387cea8552 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/FeatureVectorDAOTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/FeatureVectorDAOTest.java @@ -103,7 +103,7 @@ public class FeatureVectorDAOTest extends AbstractDBTest dataset = createDataSet(); createFeatureDef(dataset); - List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetId(dataset.getId()); + List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetIds(dataset.getId()); assertEquals(1, featureDefs.size()); ImgFeatureDefDTO featureDef = featureDefs.get(0); @@ -113,7 +113,7 @@ public class FeatureVectorDAOTest extends AbstractDBTest testCreateAndListFeatureVocabularyValues(featureDef); createFeatureFloatValues(featureDef); - List<ImgFeatureValuesDTO> featureValuesList = dao.getFeatureValues(featureDef); + List<ImgFeatureValuesDTO> featureValuesList = dao.getFeatureValues(featureDef.getId()); assertEquals(1, featureValuesList.size()); ImgFeatureValuesDTO featureValues = featureValuesList.get(0); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java index e3428dd866627daf9c57d35ea079856af0a09a90..aa8ef9b6b8ea79bb4a687c5b94717a9c456af281 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java +++ 
b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java @@ -354,6 +354,10 @@ public class ImagingQueryDAOTest extends AbstractDBTest assertEquals(spotHeight, loadedContainer.getNumberOfRows()); assertEquals(experimentId, loadedContainer.getExperimentId()); + List<ImgContainerDTO> containers = dao.listContainersByIds(containerId); + assertEquals(1, containers.size()); + assertEquals(loadedContainer, containers.get(0)); + return containerId; } @@ -372,6 +376,12 @@ public class ImagingQueryDAOTest extends AbstractDBTest assertEquals(fieldsHeight, loadedDataset.getFieldNumberOfRows()); assertEquals(containerIdOrNull, loadedDataset.getContainerId()); + // test listDatasetsByPermId + final List<ImgDatasetDTO> datasets = dao.listDatasetsByPermId(new String[] + { permId, "not existing" }); + assertEquals(1, datasets.size()); + assertEquals(loadedDataset, datasets.get(0)); + return datasetId; } diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dto/api/impl/FeatureDefinitionValuesTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dto/api/impl/FeatureDefinitionValuesTest.java index 62d64687c400b2f59f225182bbdf40dc67088931..7b878d8a7bd6f6a7a7b301cd834e0539658ebebb 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dto/api/impl/FeatureDefinitionValuesTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dto/api/impl/FeatureDefinitionValuesTest.java @@ -96,17 +96,20 @@ public class FeatureDefinitionValuesTest extends AssertJUnit List<ImgFeatureVocabularyTermDTO> terms = vector.getVocabularyTerms(); assertNotNull(terms); assertEquals(2, terms.size()); - assertEquals("a", terms.get(0).getCode()); - assertEquals("b", terms.get(1).getCode()); + int termIxA = terms.get(0).getCode().equals("a") ? 
0 : 1; + + assertEquals("a", terms.get(termIxA).getCode()); + assertEquals("b", terms.get(1 - termIxA).getCode()); assertEquals(1, vector.getValues().size()); ImgFeatureValuesDTO featureValuesDTO = vector.getValues().get(0); assertNull(featureValuesDTO.getT()); assertNull(featureValuesDTO.getZ()); PlateFeatureValues values = featureValuesDTO.getValues(); - assertEquals(terms.get(0).getSequenceNumber(), (int) values.getForWellLocation(1, 1)); - assertEquals(terms.get(1).getSequenceNumber(), (int) values.getForWellLocation(1, 2)); - assertEquals(terms.get(0).getSequenceNumber(), (int) values.getForWellLocation(1, 3)); + assertEquals(terms.get(termIxA).getSequenceNumber(), (int) values.getForWellLocation(1, 1)); + assertEquals(terms.get(1 - termIxA).getSequenceNumber(), + (int) values.getForWellLocation(1, 2)); + assertEquals(terms.get(termIxA).getSequenceNumber(), (int) values.getForWellLocation(1, 3)); } } \ No newline at end of file diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java index 325c65dbb01c7d6f3c6ae4a5bdd3fa9bc1e27df1..4cb76b3ce14eb69fde557a1bf32c7989f64a28ef 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java @@ -98,7 +98,7 @@ public class FeatureVectorUploaderTest extends AbstractDBTest private void verify() { - List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetId(datasetId); + List<ImgFeatureDefDTO> featureDefs = dao.listFeatureDefsByDataSetIds(datasetId); assertEquals(2, featureDefs.size()); count = 0; @@ -112,7 +112,7 @@ public class FeatureVectorUploaderTest extends AbstractDBTest private void verifyFeatureDef() { assertEquals(TEST_FEATURE_NAME + count, featureDef.getLabel()); - List<ImgFeatureValuesDTO> featureValuesList = dao.getFeatureValues(featureDef); + List<ImgFeatureValuesDTO> featureValuesList = dao.getFeatureValues(featureDef.getId()); assertEquals(1, featureValuesList.size()); featureValues = featureValuesList.get(0); verifyFeatureValues(); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHeatmapTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHeatmapTest.java index ef186bf518926ceb1a8e98e2bc81f7a1e57232e6..d6f80a39038333bb57512966ddbe8004262f3f0b 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHeatmapTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHeatmapTest.java @@ -64,7 +64,7 @@ public class TabularDataHeatmapTest extends AbstractTabularDataGraphTest assertTrue(outputFile.length() > 0); } - @Test + @Test(groups = "slow") public void testBigNumberHeatmap() throws IOException { File outputFile = getImageOutputFile(); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHistogramTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHistogramTest.java index 96c4234dfb35a888349a52b8f2699971b4b6e30e..fab88d536379a2c570b42f1b734894a8df946ddb 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHistogramTest.java +++ 
b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/graph/TabularDataHistogramTest.java @@ -171,7 +171,7 @@ public class TabularDataHistogramTest extends AbstractTabularDataGraphTest assertTrue(outputFile.length() > 0); } - @Test + @Test(groups = "slow") public void testLotsOfBlanksHistogram() throws IOException { File outputFile = getImageOutputFile(); @@ -189,7 +189,7 @@ public class TabularDataHistogramTest extends AbstractTabularDataGraphTest assertTrue(outputFile.length() > 0); } - @Test + @Test(groups = "slow") public void testIncorrectlyConfiguredHistogram() throws IOException { File outputFile = getImageOutputFile(); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/ImageAnalysisMergedRowsReportingPluginTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/ImageAnalysisMergedRowsReportingPluginTest.java index 19a1e540f413718b9f46613ef4896a3633c291c9..4b60d41b665da3cbe9798909732ef410c553331d 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/ImageAnalysisMergedRowsReportingPluginTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/generic/server/plugins/ImageAnalysisMergedRowsReportingPluginTest.java @@ -86,68 +86,56 @@ public class ImageAnalysisMergedRowsReportingPluginTest extends AssertJUnit final DatasetDescription ds1 = new DatasetDescription(); ds1.setDatasetCode("ds1"); final ImgContainerDTO p1 = new ImgContainerDTO("p1", 3, 2, 0); + p1.setId(101); final SampleIdentifier p1Identifier = new SampleIdentifier(new SpaceIdentifier("1", "S"), "P1"); final DatasetDescription ds2 = new DatasetDescription(); ds2.setDatasetCode("ds2"); final ImgContainerDTO p2 = new ImgContainerDTO("p2", 2, 1, 0); + p2.setId(102); final SampleIdentifier p2Identifier = new SampleIdentifier(new SpaceIdentifier("1", "S"), "P2"); final ImgFeatureDefDTO ds1f1 = new ImgFeatureDefDTO("f1", "F1", "", 1); + ds1f1.setId(1); final ImgFeatureDefDTO ds1f2 = new ImgFeatureDefDTO("f2", "F2", "", 1); + ds1f2.setId(2); final ImgFeatureDefDTO ds2f2 = new ImgFeatureDefDTO("f2", "F2", "", 2); + ds2f2.setId(3); final ImgFeatureDefDTO ds2f3 = new ImgFeatureDefDTO("f3", "F3", "", 2); + ds2f3.setId(4); final ImgFeatureValuesDTO ds1f1Values = - createFeatureValues("12, 2.5", "24, 3.25", "-1.5, 42"); + createFeatureValues(ds1f1.getId(), "12, 2.5", "24, 3.25", "-1.5, 42"); final ImgFeatureValuesDTO ds1f2Values = - createFeatureValues("-3.5, 12.5", "-2, 1", "5, 4.25"); - final ImgFeatureValuesDTO ds2f2Values = createFeatureValues("23", "5.75"); - final ImgFeatureValuesDTO ds2f3Values = createFeatureValues("-9", "44.125"); + createFeatureValues(ds1f2.getId(), "-3.5, 12.5", "-2, 1", "5, 4.25"); + final ImgFeatureValuesDTO ds2f2Values = createFeatureValues(ds2f2.getId(), "23", "5.75"); + final ImgFeatureValuesDTO ds2f3Values = createFeatureValues(ds2f3.getId(), "-9", "44.125"); context.checking(new Expectations() { { - one(dao).tryGetDatasetByPermId(ds1.getDatasetCode()); - will(returnValue(createDataSet(1))); + one(dao).listDatasetsByPermId(ds1.getDatasetCode(), ds2.getDatasetCode()); + will(returnValue(Arrays.asList(createDataSet(1), createDataSet(2)))); - one(dao).listFeatureDefsByDataSetId(1); - will(returnValue(Arrays.asList(ds1f1, ds1f2))); + one(dao).listFeatureDefsByDataSetIds(1, 2); + will(returnValue(Arrays.asList(ds1f1, ds1f2, ds2f2, ds2f3))); - one(dao).listFeatureVocabularyTermsByDataSetId(1); + one(dao).listFeatureVocabularyTermsByDataSetId(1, 2); 
will(returnValue(new ArrayList<ImgFeatureVocabularyTermDTO>())); - one(dao).getContainerById(101); - will(returnValue(p1)); + one(dao).listContainersByIds(101, 102); + will(returnValue(Arrays.asList(p1, p2))); one(service).tryGetSampleIdentifier(p1.getPermId()); will(returnValue(p1Identifier)); - one(dao).getFeatureValues(ds1f1); - will(returnValue(Arrays.asList(ds1f1Values))); - - one(dao).getFeatureValues(ds1f2); - will(returnValue(Arrays.asList(ds1f2Values))); - - one(dao).tryGetDatasetByPermId(ds2.getDatasetCode()); - will(returnValue(createDataSet(2))); - - one(dao).listFeatureDefsByDataSetId(2); - will(returnValue(Arrays.asList(ds2f2, ds2f3))); - - one(dao).listFeatureVocabularyTermsByDataSetId(2); - will(returnValue(new ArrayList<ImgFeatureVocabularyTermDTO>())); - - one(dao).getContainerById(102); - will(returnValue(p2)); + one(dao).getFeatureValues(ds1f1Values.getFeatureDefId(), + ds1f2Values.getFeatureDefId(), ds2f2Values.getFeatureDefId(), + ds2f3Values.getFeatureDefId()); + will(returnValue(Arrays.asList(ds1f1Values, ds1f2Values, ds2f2Values, + ds2f3Values))); one(service).tryGetSampleIdentifier(p2.getPermId()); will(returnValue(p2Identifier)); - one(dao).getFeatureValues(ds2f2); - will(returnValue(Arrays.asList(ds2f2Values))); - - one(dao).getFeatureValues(ds2f3); - will(returnValue(Arrays.asList(ds2f3Values))); - } }); @@ -178,7 +166,7 @@ public class ImageAnalysisMergedRowsReportingPluginTest extends AssertJUnit return datasetDTO; } - private ImgFeatureValuesDTO createFeatureValues(String... rows) + private ImgFeatureValuesDTO createFeatureValues(long featureDefId, String... rows) { float[][] matrix = new float[rows[0].split(",").length][rows.length]; for (int i = 0; i < rows.length; i++) @@ -192,6 +180,6 @@ public class ImageAnalysisMergedRowsReportingPluginTest extends AssertJUnit } final MDFloatArray array = new MDFloatArray(matrix); return new ImgFeatureValuesDTO(0.0, 0.0, new PlateFeatureValues( - NativeTaggedArray.toByteArray(array)), 0L); + NativeTaggedArray.toByteArray(array)), featureDefId); } } diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java index 412db84bbd1252ea129d09ea16952721f7e5268f..a872a86b1be44711d8a791545f770703acfa4361 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java @@ -246,26 +246,15 @@ public class DssServiceRpcScreeningTest extends AssertJUnit { prepareAssetDataSetsAreAccessible(); prepareLockDataSet("ds1", "ds2"); - prepareGetFeatureDefinitions(1, "f1", "f2"); - prepareGetFeatureDefinitions(2, "f2", "f3"); - @SuppressWarnings("deprecation") - List<String> names = - screeningService.listAvailableFeatureNames(SESSION_TOKEN, Arrays.asList( - featureVectorDatasetIdentifier1, featureVectorDatasetIdentifier2)); - - assertEquals("[f1, f2, f3]", names.toString()); - assertTrue(testMethodInterceptor.methodInvoked); - context.assertIsSatisfied(); - } - - @Test - public void testAuthorization() - { - prepareAssetDataSetsAreAccessible(); - prepareLockDataSet("ds1", "ds2"); - prepareGetFeatureDefinitions(1, "f1", "f2"); - prepareGetFeatureDefinitions(2, "f2", "f3"); + long[] dataSetIDs = new long[] + { 1, 2 }; + String[][] featureCodesPerDataset = new String[][] + { + { "f1", "f2" }, + { "f2", "f3" } }; + 
prepareListDatasets(dataSetIDs); + prepareGetFeatureDefinitions(dataSetIDs, featureCodesPerDataset); List<String> names = screeningService.listAvailableFeatureCodes(SESSION_TOKEN, Arrays.asList( @@ -283,8 +272,16 @@ public class DssServiceRpcScreeningTest extends AssertJUnit prepareLockDataSet("ds1", "ds2"); FeatureVectorDatasetReference r1 = createFeatureVectorDatasetReference(DATASET_CODE); FeatureVectorDatasetReference r2 = createFeatureVectorDatasetReference("ds2"); - prepareCreateFeatureVectorDataSet(1, "F1", "F2"); - prepareCreateFeatureVectorDataSet(2, "F2"); + String[][] featureCodesPerDataset = new String[][] + { + { "F1", "F2" } }; + prepareLoadFeatures(new long[] + { 1 }, featureCodesPerDataset); + featureCodesPerDataset = new String[][] + { + { "F2" } }; + prepareLoadFeatures(new long[] + { 2 }, featureCodesPerDataset); List<FeatureVectorDataset> dataSets = screeningService.loadFeatures(SESSION_TOKEN, Arrays.asList(r1, r2), @@ -308,6 +305,15 @@ public class DssServiceRpcScreeningTest extends AssertJUnit context.assertIsSatisfied(); } + private void prepareLoadFeatures(long[] dataSetIDs, String[][] featureCodesPerDataset) + { + prepareListDatasets(dataSetIDs); + prepareListContainers(dataSetIDs); + prepareGetFeatureDefinitions(dataSetIDs, featureCodesPerDataset); + prepareGetFeatureVocabularyTerms(dataSetIDs); + prepareCreateFeatureVectorDataSet(dataSetIDs, featureCodesPerDataset); + } + @Test public void testLoadImages() throws IOException { @@ -590,71 +596,154 @@ public class DssServiceRpcScreeningTest extends AssertJUnit }); } - private void prepareCreateFeatureVectorDataSet(final long dataSetID, - final String... featureCodes) + private void prepareCreateFeatureVectorDataSet(final long[] dataSetIDs, + final String[]... featureCodesPerDataset) { - prepareGetFeatureDefinitions(dataSetID, featureCodes); - prepareGetFeatureVocabularyTerms(dataSetID); context.checking(new Expectations() { { - one(dao).getContainerById(100 + dataSetID); - will(returnValue(new ImgContainerDTO("12-34", 1, 2, 0))); + List<ImgFeatureValuesDTO> values = new ArrayList<ImgFeatureValuesDTO>(); + long[] featureDefIds = new long[countFeatureCodes(featureCodesPerDataset)]; + int featureDefIx = 0; + int datasetIx = 0; - one(service).tryToGetSampleIdentifier("12-34"); - will(returnValue(new SampleIdentifier(new SpaceIdentifier("1", "S"), "P1"))); + for (String[] featureCodes : featureCodesPerDataset) + { + long dataSetId = dataSetIDs[datasetIx++]; + for (String featureCode : featureCodes) + { + featureDefIds[featureDefIx] = getFeatureDefId(featureCode); + int offset = getFeatureDefId(featureCode); + PlateFeatureValues matrixValues = + new PlateFeatureValues(NativeTaggedArray + .toByteArray(new MDFloatArray(new float[][] + { + { 3.5f * dataSetId + offset }, + { 1.25f * dataSetId + offset } }))); + ImgFeatureValuesDTO value = + new ImgFeatureValuesDTO(0.0, 0.0, matrixValues, 0L); + value.setFeatureDefId(featureDefIds[featureDefIx]); + values.add(value); + + featureDefIx++; + } + } + one(dao).getFeatureValues(featureDefIds); + will(returnValue(values)); + } - for (String code : featureCodes) + }); + } + + private void prepareListContainers(final long[] dataSetIDs) + { + context.checking(new Expectations() + { + { + long[] containerIds = new long[dataSetIDs.length]; + List<ImgContainerDTO> containers = new ArrayList<ImgContainerDTO>(); + + for (int i = 0; i < dataSetIDs.length; i++) { - one(dao).getFeatureValues(new ImgFeatureDefDTO(code, code, "", 0)); - int offset = Integer.parseInt(code, 16); - 
PlateFeatureValues array = - new PlateFeatureValues(NativeTaggedArray - .toByteArray(new MDFloatArray(new float[][] - { - { 3.5f * dataSetID + offset }, - { 1.25f * dataSetID + offset } }))); - will(returnValue(Arrays - .asList(new ImgFeatureValuesDTO(0.0, 0.0, array, 0L)))); + long id = dataSetIDs[i]; + containerIds[i] = getContainerId(id); + ImgContainerDTO container = new ImgContainerDTO("12-34", 1, 2, 0); + container.setId(containerIds[i]); + containers.add(container); + + one(service).tryToGetSampleIdentifier("12-34"); + will(returnValue(new SampleIdentifier(new SpaceIdentifier("1", "S"), "P1"))); } + + one(dao).listContainersByIds(containerIds); + will(returnValue(containers)); + } + }); + } + + private void prepareListDatasets(final long[] dataSetIDs) + { + context.checking(new Expectations() + { + { + String[] permIDs = new String[dataSetIDs.length]; + List<ImgDatasetDTO> dataSets = new ArrayList<ImgDatasetDTO>(); + + for (int i = 0; i < dataSetIDs.length; i++) + { + long id = dataSetIDs[i]; + permIDs[i] = "ds" + id; + + ImgDatasetDTO dataSet = + new ImgDatasetDTO(permIDs[i], null, null, getContainerId(id), false); + dataSet.setId(id); + dataSets.add(dataSet); + } + + one(dao).listDatasetsByPermId(permIDs); + will(returnValue(dataSets)); } }); } - private void prepareGetFeatureDefinitions(final long dataSetID, final String... featureCodes) + private void prepareGetFeatureDefinitions(final long[] dataSetIDs, + final String[]... featureCodesPerDataset) { context.checking(new Expectations() { { - String permID = "ds" + dataSetID; - one(dao).tryGetDatasetByPermId(permID); - ImgDatasetDTO dataSet = - new ImgDatasetDTO(permID, null, null, 100 + dataSetID, false); - dataSet.setId(dataSetID); - will(returnValue(dataSet)); - - one(dao).listFeatureDefsByDataSetId(dataSetID); List<ImgFeatureDefDTO> defs = new ArrayList<ImgFeatureDefDTO>(); - for (String code : featureCodes) + int datasetIx = 0; + for (String[] featureCodes : featureCodesPerDataset) { - defs.add(new ImgFeatureDefDTO(code, code, "", 0)); + long dataSetID = dataSetIDs[datasetIx]; + for (String code : featureCodes) + { + ImgFeatureDefDTO def = new ImgFeatureDefDTO(code, code, "", 0); + def.setDataSetId(dataSetID); + def.setId(getFeatureDefId(code)); + defs.add(def); + } + datasetIx++; } + + one(dao).listFeatureDefsByDataSetIds(dataSetIDs); will(returnValue(defs)); } }); } - private void prepareGetFeatureVocabularyTerms(final long dataSetID) + private void prepareGetFeatureVocabularyTerms(final long[] dataSetIDs) { context.checking(new Expectations() { { - one(dao).listFeatureVocabularyTermsByDataSetId(dataSetID); + one(dao).listFeatureVocabularyTermsByDataSetId(dataSetIDs); will(returnValue(new ArrayList<ImgFeatureVocabularyTermDTO>())); } }); } + private static long getContainerId(long datasetId) + { + return datasetId + 100; + } + + private static int countFeatureCodes(String[][] featureCodesPerDataset) + { + int counter = 0; + for (String[] featureCodes : featureCodesPerDataset) + { + counter += featureCodes.length; + } + return counter; + } + + private static int getFeatureDefId(String code) + { + return Integer.parseInt(code, 16); + } + private void prepareAssetDataSetIsAccessible(final String dsCode) { context.checking(new Expectations() diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/FeatureVectorDatasetLoaderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/FeatureVectorDatasetLoaderTest.java index 
99b15b63391805454f63289c22367faca1fe3456..a205f6ce48e26ed701a84b13e564a904bcb954aa 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/FeatureVectorDatasetLoaderTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/FeatureVectorDatasetLoaderTest.java @@ -21,6 +21,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -39,8 +40,6 @@ import ch.systemsx.cisd.openbis.plugin.screening.server.IScreeningBusinessObject import ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto.PlateIdentifier; /** - * - * * @author Franz-Josef Elmer */ public class FeatureVectorDatasetLoaderTest extends AbstractServerTestCase @@ -52,18 +51,28 @@ public class FeatureVectorDatasetLoaderTest extends AbstractServerTestCase { screeningBOFactory = context.mock(IScreeningBusinessObjectFactory.class); } - + @Test public void testGetFeatureVectorDatasets() { final RecordingMatcher<ListOrSearchSampleCriteria> recordingCriteriaMatcher = new RecordingMatcher<ListOrSearchSampleCriteria>(); - final ExternalData ids1 = new DataSetBuilder(1l).code("ids1").type("HCS_IMAGE").getDataSet(); - final ExternalData fds1 = new DataSetBuilder(11l).code("fds1").type("HCS_ANALYSIS_WELL_FEATURES").getDataSet(); - final ExternalData fds2 = new DataSetBuilder(12l).code("fds2").type("HCS_ANALYSIS_WELL_FEATURES").getDataSet(); - final ExternalData ids2 = new DataSetBuilder(2l).code("ids2").type("HCS_IMAGE").getDataSet(); - final ExternalData fds3 = new DataSetBuilder(21l).code("fds3").type("HCS_ANALYSIS_WELL_FEATURES").getDataSet(); - final ExternalData fds4 = new DataSetBuilder(100l).code("fds4").type("HCS_ANALYSIS_WELL_FEATURES").getDataSet(); + final ExternalData ids1 = + new DataSetBuilder(1l).code("ids1").type("HCS_IMAGE").getDataSet(); + final ExternalData fds1 = + new DataSetBuilder(11l).code("fds1").type("HCS_ANALYSIS_WELL_FEATURES") + .getDataSet(); + final ExternalData fds2 = + new DataSetBuilder(12l).code("fds2").type("HCS_ANALYSIS_WELL_FEATURES") + .getDataSet(); + final ExternalData ids2 = + new DataSetBuilder(2l).code("ids2").type("HCS_IMAGE").getDataSet(); + final ExternalData fds3 = + new DataSetBuilder(21l).code("fds3").type("HCS_ANALYSIS_WELL_FEATURES") + .getDataSet(); + final ExternalData fds4 = + new DataSetBuilder(100l).code("fds4").type("HCS_ANALYSIS_WELL_FEATURES") + .getDataSet(); final ExternalData ds1 = new DataSetBuilder(101l).code("ds1").type("BLABLA").getDataSet(); context.checking(new Expectations() { @@ -72,18 +81,18 @@ public class FeatureVectorDatasetLoaderTest extends AbstractServerTestCase will(returnValue(sampleLister)); allowing(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - + one(sampleLister).list(with(recordingCriteriaMatcher)); ArrayList<Sample> samples = new ArrayList<Sample>(); samples.add(new SampleBuilder("/S/P1").id(42l).permID("s-1").getSample()); will(returnValue(samples)); - - one(datasetLister).listBySampleIds(Arrays.asList(42l)); + + one(datasetLister).listBySampleIds(new HashSet<Long>(Arrays.asList(42l))); will(returnValue(Arrays.asList(ids1, fds1, fds2, ids2, fds3, fds4, ds1))); - + one(datasetLister).listByParentTechIds(Arrays.asList(1l, 2l)); will(returnValue(Arrays.asList(fds1, fds2, fds3, ds1))); - + one(datasetLister).listParentIds(Arrays.asList(11l, 12l, 21l)); HashMap<Long, Set<Long>> map = new HashMap<Long, Set<Long>>(); 
map.put(11l, Collections.singleton(1l)); @@ -96,11 +105,15 @@ public class FeatureVectorDatasetLoaderTest extends AbstractServerTestCase Arrays.<PlateIdentifier> asList(new PlateIdentifier("P1", "S", "s-1")); FeatureVectorDatasetLoader loader = new FeatureVectorDatasetLoader(SESSION, screeningBOFactory, null, plateIdentifiers); - - List<ExternalData> datasets = new ArrayList<ExternalData>(loader.getFeatureVectorDatasets()); - - assertEquals("[]", Arrays.asList(recordingCriteriaMatcher.recordedObject().trySampleCodes()).toString()); - assertEquals("[s-1]", Arrays.asList(recordingCriteriaMatcher.recordedObject().trySamplePermIds()).toString()); + + List<ExternalData> datasets = + new ArrayList<ExternalData>(loader.getFeatureVectorDatasets()); + + assertEquals("[]", Arrays + .asList(recordingCriteriaMatcher.recordedObject().trySampleCodes()).toString()); + assertEquals("[s-1]", + Arrays.asList(recordingCriteriaMatcher.recordedObject().trySamplePermIds()) + .toString()); Collections.sort(datasets, new Comparator<ExternalData>() { public int compare(ExternalData o1, ExternalData o2) diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/HCSImageDatasetLoaderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/HCSImageDatasetLoaderTest.java index 8ad5b2c618ae8be5591395974b29cb9e3bfaeac0..4bb9ead87afe95f7810efc01f2c9db03f7aab4af 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/HCSImageDatasetLoaderTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/HCSImageDatasetLoaderTest.java @@ -21,6 +21,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -45,16 +46,16 @@ import ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto.PlateIdentifi import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ScreeningConstants; /** - * - * * @author Franz-Josef Elmer */ public class HCSImageDatasetLoaderTest extends AbstractServerTestCase { private static final String STORE_CODE = "store-1"; + private static final String DOWNLOAD_URL = "http://download"; + private static final String HOST_URL = "http://host"; - + private IScreeningBusinessObjectFactory screeningBOFactory; @BeforeMethod @@ -62,7 +63,7 @@ public class HCSImageDatasetLoaderTest extends AbstractServerTestCase { screeningBOFactory = context.mock(IScreeningBusinessObjectFactory.class); } - + private DataSetBuilder dataSet(long id) { DataStoreBuilder dataStoreBuilder = new DataStoreBuilder(STORE_CODE); @@ -70,24 +71,34 @@ public class HCSImageDatasetLoaderTest extends AbstractServerTestCase Experiment experiment = new ExperimentBuilder().identifier("/S/P/E1").getExperiment(); return new DataSetBuilder(id).store(dataStoreBuilder.getStore()).experiment(experiment); } - + @Test public void testGetSegmentationImageDatasetReferences() { final RecordingMatcher<ListOrSearchSampleCriteria> recordingCriteriaMatcher = - new RecordingMatcher<ListOrSearchSampleCriteria>(); + new RecordingMatcher<ListOrSearchSampleCriteria>(); SampleBuilder sampleBuilder = new SampleBuilder("/S/P1").id(42l).permID("s-1"); VocabularyTerm value = new VocabularyTerm(); value.setCode("96_WELLS_8X12"); sampleBuilder.property(ScreeningConstants.PLATE_GEOMETRY).value(value); final Sample sample = sampleBuilder.getSample(); - final ExternalData ids1 = 
dataSet(1l).code("ids1").type("HCS_IMAGE").sample(sample).getDataSet(); - final ExternalData sds1 = dataSet(11l).code("sds1").type("HCS_IMAGE_SEGMENTATION").sample(sample).getDataSet(); - final ExternalData sds2 = dataSet(12l).code("sds2").type("HCS_IMAGE_SEGMENTATION").sample(sample).getDataSet(); - final ExternalData ids2 = dataSet(2l).code("ids2").type("HCS_IMAGE").sample(sample).getDataSet(); - final ExternalData sds3 = dataSet(21l).code("sds3").type("HCS_IMAGE_SEGMENTATION").sample(sample).getDataSet(); - final ExternalData sds4 = dataSet(100l).code("sds4").type("HCS_IMAGE_SEGMENTATION").getDataSet(); - final ExternalData ds1 = dataSet(101l).code("ds1").type("BLABLA").sample(sample).getDataSet(); + final ExternalData ids1 = + dataSet(1l).code("ids1").type("HCS_IMAGE").sample(sample).getDataSet(); + final ExternalData sds1 = + dataSet(11l).code("sds1").type("HCS_IMAGE_SEGMENTATION").sample(sample) + .getDataSet(); + final ExternalData sds2 = + dataSet(12l).code("sds2").type("HCS_IMAGE_SEGMENTATION").sample(sample) + .getDataSet(); + final ExternalData ids2 = + dataSet(2l).code("ids2").type("HCS_IMAGE").sample(sample).getDataSet(); + final ExternalData sds3 = + dataSet(21l).code("sds3").type("HCS_IMAGE_SEGMENTATION").sample(sample) + .getDataSet(); + final ExternalData sds4 = + dataSet(100l).code("sds4").type("HCS_IMAGE_SEGMENTATION").getDataSet(); + final ExternalData ds1 = + dataSet(101l).code("ds1").type("BLABLA").sample(sample).getDataSet(); context.checking(new Expectations() { { @@ -101,7 +112,7 @@ public class HCSImageDatasetLoaderTest extends AbstractServerTestCase samples.add(sample); will(returnValue(samples)); - one(datasetLister).listBySampleIds(Arrays.asList(42l)); + one(datasetLister).listBySampleIds(new HashSet<Long>(Arrays.asList(42l))); will(returnValue(Arrays.asList(ids1, sds1, sds2, ids2, sds3, sds4, ds1))); one(datasetLister).listByParentTechIds(Arrays.asList(1l, 2l)); @@ -135,11 +146,14 @@ public class HCSImageDatasetLoaderTest extends AbstractServerTestCase } }); assertEquals("sds1 (plate: /S/P1 [s-1])", references.get(0).toString()); - assertEquals("ids1 (plate: /S/P1 [s-1])", references.get(0).getParentImageDatasetReference().toString()); + assertEquals("ids1 (plate: /S/P1 [s-1])", references.get(0) + .getParentImageDatasetReference().toString()); assertEquals("sds2 (plate: /S/P1 [s-1])", references.get(1).toString()); - assertEquals("ids1 (plate: /S/P1 [s-1])", references.get(1).getParentImageDatasetReference().toString()); + assertEquals("ids1 (plate: /S/P1 [s-1])", references.get(1) + .getParentImageDatasetReference().toString()); assertEquals("sds3 (plate: /S/P1 [s-1])", references.get(2).toString()); - assertEquals("ids2 (plate: /S/P1 [s-1])", references.get(2).getParentImageDatasetReference().toString()); + assertEquals("ids2 (plate: /S/P1 [s-1])", references.get(2) + .getParentImageDatasetReference().toString()); assertEquals(3, references.size()); context.assertIsSatisfied(); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/ScreeningApiImplTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/ScreeningApiImplTest.java index 25288f98a67b29cccf5c869e1b1067ce3da4cfbb..55cca8692e1a0c92bfd1b218cddada7f6575185f 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/ScreeningApiImplTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/ScreeningApiImplTest.java @@ -150,6 +150,11 @@ public class 
ScreeningApiImplTest extends AbstractServerTestCase context.assertIsSatisfied(); } + private static HashSet<Long> asSet(long id) + { + return new HashSet<Long>(Arrays.asList(id)); + } + @Test public void testListImageDatasets() { @@ -165,7 +170,7 @@ public class ScreeningApiImplTest extends AbstractServerTestCase one(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - one(datasetLister).listBySampleIds(with(Arrays.asList((long) 1))); + one(datasetLister).listBySampleIds(with(asSet(1))); will(returnValue(Arrays.asList(imageDataSet(p1, "1", 1), imageAnalysisDataSet(p1, "2", 2)))); } @@ -199,7 +204,7 @@ public class ScreeningApiImplTest extends AbstractServerTestCase one(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - one(datasetLister).listBySampleIds(with(Arrays.asList((long) 1))); + one(datasetLister).listBySampleIds(with(asSet(1))); will(returnValue(Arrays.asList(imageRawDataSet(p1, "1", 1), imageRawDataSet(p1, "2", 2), imageAnalysisDataSet(p1, "3", 3)))); } @@ -240,17 +245,17 @@ public class ScreeningApiImplTest extends AbstractServerTestCase allowing(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - one(datasetLister).listBySampleIds(with(Arrays.asList((long) 1))); + one(datasetLister).listBySampleIds(with(asSet(1))); final ExternalData rawImage = imageRawDataSet(p1, "2", 2); - ExternalData imageSegmentationDataSet = imageSegmentationDataSet(p1, "3", 3, rawImage); + ExternalData imageSegmentationDataSet = + imageSegmentationDataSet(p1, "3", 3, rawImage); ExternalData imageAnalysisDataSet = imageAnalysisDataSet(p1, "4", 4); will(returnValue(Arrays.asList(imageDataSet(p1, "1", 1), rawImage, - imageSegmentationDataSet, - imageAnalysisDataSet))); - + imageSegmentationDataSet, imageAnalysisDataSet))); + one(datasetLister).listByParentTechIds(Arrays.asList(1l, 2l)); will(returnValue(Arrays.asList(imageSegmentationDataSet, imageAnalysisDataSet))); - + one(datasetLister).listParentIds(Arrays.asList(3l)); HashMap<Long, Set<Long>> result = new HashMap<Long, Set<Long>>(); result.put(3l, Collections.singleton(2l)); @@ -295,7 +300,7 @@ public class ScreeningApiImplTest extends AbstractServerTestCase one(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - one(datasetLister).listBySampleIds(with(Arrays.asList((long) 1))); + one(datasetLister).listBySampleIds(with(asSet(1))); will(returnValue(Arrays.asList(imageDataSet(p1, "1", 1)))); } }); @@ -356,7 +361,8 @@ public class ScreeningApiImplTest extends AbstractServerTestCase exactly(2).of(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); long imageDatasetId = 1; - one(datasetLister).listBySampleIds(with(Arrays.asList(imageDatasetId))); + one(datasetLister).listBySampleIds( + with(new HashSet<Long>(asSet(imageDatasetId)))); will(returnValue(Arrays.asList( imageDataSet(p1, "" + imageDatasetId, imageDatasetId), imageAnalysisDataSet(p1, "2", 2)))); @@ -411,7 +417,7 @@ public class ScreeningApiImplTest extends AbstractServerTestCase one(screeningBOFactory).createDatasetLister(SESSION); will(returnValue(datasetLister)); - one(datasetLister).listBySampleIds(with(Arrays.asList((long) 1))); + one(datasetLister).listBySampleIds(with(asSet(1))); will(returnValue(Arrays.asList(imageDataSet(p1, "1", 1)))); } }); diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoaderTest.java 
b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoaderTest.java index 5afa9de28219954b76d5dcb05a86bd11a29e126c..5ef39a206fc1f79b3773f5655ae3059b72d56f3f 100644 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoaderTest.java +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/server/logic/WellContentLoaderTest.java @@ -52,7 +52,7 @@ public class WellContentLoaderTest extends AbstractScreeningDAOTest @Autowired IGenericServer server; - @Test + @Test(groups = "slow") public void testLoadWellContentsWithProperties() { diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java deleted file mode 100644 index a7964cd8a580cf6b672d9a6b72b198bbb3644745..0000000000000000000000000000000000000000 --- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright 2010 ETH Zuerich, CISD - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package ch.systemsx.cisd.openbis.plugin.screening.shared.imaging; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.jmock.Expectations; -import org.jmock.Mockery; -import org.testng.AssertJUnit; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import ch.systemsx.cisd.openbis.dss.generic.shared.utils.CodeAndLabelUtil; -import ch.systemsx.cisd.openbis.generic.shared.basic.dto.CodeAndLabel; -import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier; -import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier; -import ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto.Geometry; -import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.PlateUtils; -import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.FeatureValue; -import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.FeatureTableRow; -import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateFeatureValues; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.FeatureVectorLoader.IMetadataProvider; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.IImagingReadonlyQueryDAO; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgContainerDTO; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureDefDTO; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureValuesDTO; -import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureVocabularyTermDTO; - -/** - * Tests of {@link 
FeatureVectorLoader}. - * - * @author Franz-Josef Elmer - */ -public class FeatureTableBuilderTest extends AssertJUnit -{ - private static final int EXPERIMENT_ID = 42; - - private static final String DATA_SET_CODE1 = "ds1"; - - private static final String DATA_SET_CODE2 = "ds2"; - - private static final String DATA_SET_CODE3 = "ds3"; - - private Mockery context; - - private IMetadataProvider service; - - private IImagingReadonlyQueryDAO dao; - - @BeforeMethod - public void beforeMethod() - { - context = new Mockery(); - service = context.mock(IMetadataProvider.class); - dao = context.mock(IImagingReadonlyQueryDAO.class); - } - - @AfterMethod - public final void tearDown() - { - // The following line of code should also be called at the end of each test method. - // Otherwise one do not known which test failed. - context.assertIsSatisfied(); - } - - @Test - public void testNoFiltering() - { - prepareAddFeatureVectors(1, null, "<A>a", "<B>b"); - prepareAddFeatureVectors(2, null, "<B>beta", "c"); - prepareAddFeatureVectors(3, null, "<B>b"); - - FeatureVectorLoader builder = createBuilder(); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE1); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE2); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE3); - List<CodeAndLabel> codesAndLabels = builder.getCodesAndLabels(); - List<FeatureTableRow> rows = builder.createFeatureTableRows(); - - assertEquals("[<A> a, <B> b, <B> beta, <C> c]", codesAndLabels.toString()); - assertFeatureTableRow(DATA_SET_CODE1, "A1", "db:/s/S1", "1.5, 11.5, NaN, NaN", rows.get(0)); - assertFeatureTableRow(DATA_SET_CODE1, "A2", "db:/s/S1", "0.5, 10.5, NaN, NaN", rows.get(1)); - assertFeatureTableRow(DATA_SET_CODE2, "A1", "db:/s/S2", "NaN, NaN, 2.5, 12.5", rows.get(2)); - assertFeatureTableRow(DATA_SET_CODE2, "A2", "db:/s/S2", "NaN, NaN, 1.5, 11.5", rows.get(3)); - assertFeatureTableRow(DATA_SET_CODE3, "A1", "db:/s/S3", "NaN, 3.5, NaN, NaN", rows.get(4)); - assertFeatureTableRow(DATA_SET_CODE3, "A2", "db:/s/S3", "NaN, 2.5, NaN, NaN", rows.get(5)); - assertEquals(6, rows.size()); - context.assertIsSatisfied(); - } - - @Test - public void testFiltering() - { - prepareAddFeatureVectors(1, "B", "<A>a", "b"); - prepareAddFeatureVectors(2, "B", "<B>beta", "c"); - prepareAddFeatureVectors(3, "B", "b"); - - FeatureVectorLoader builder = createBuilder("B"); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE1); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE2); - builder.addFeatureVectorsOfDataSetOrDie(DATA_SET_CODE3); - List<CodeAndLabel> codesAndLabels = builder.getCodesAndLabels(); - List<FeatureTableRow> rows = builder.createFeatureTableRows(); - - assertEquals("[<B> b, <B> beta]", codesAndLabels.toString()); - assertFeatureTableRow(DATA_SET_CODE1, "A1", "db:/s/S1", "11.5, NaN", rows.get(0)); - assertFeatureTableRow(DATA_SET_CODE1, "A2", "db:/s/S1", "10.5, NaN", rows.get(1)); - assertFeatureTableRow(DATA_SET_CODE2, "A1", "db:/s/S2", "NaN, 2.5", rows.get(2)); - assertFeatureTableRow(DATA_SET_CODE2, "A2", "db:/s/S2", "NaN, 1.5", rows.get(3)); - assertFeatureTableRow(DATA_SET_CODE3, "A1", "db:/s/S3", "3.5, NaN", rows.get(4)); - assertFeatureTableRow(DATA_SET_CODE3, "A2", "db:/s/S3", "2.5, NaN", rows.get(5)); - assertEquals(6, rows.size()); - context.assertIsSatisfied(); - } - - private void prepareAddFeatureVectors(final int dataSetID, final String filteredCodeOrNull, - final String... 
featureCodesAndLabels) - { - context.checking(new Expectations() - { - { - String dataSetCode = "ds" + dataSetID; - one(dao).tryGetDatasetByPermId(dataSetCode); - long containerId = dataSetID + 100; - ImgDatasetDTO dataSet = - new ImgDatasetDTO(dataSetCode, null, null, containerId, false); - dataSet.setId(dataSetID); - will(returnValue(dataSet)); - - List<ImgFeatureDefDTO> defs = new ArrayList<ImgFeatureDefDTO>(); - Geometry geometry = Geometry.createFromCartesianDimensions(2, 1); - for (int i = 0; i < featureCodesAndLabels.length; i++) - { - String codeAndLabels = featureCodesAndLabels[i]; - CodeAndLabel codeAndTitle = CodeAndLabelUtil.create(codeAndLabels); - String title = codeAndTitle.getLabel(); - String code = codeAndTitle.getCode(); - if (filteredCodeOrNull == null || filteredCodeOrNull.equals(code)) - { - ImgFeatureDefDTO def = - new ImgFeatureDefDTO(title, code, title, dataSetID); - def.setId(2 * dataSetID); - defs.add(def); - one(dao).getFeatureValues(def); - PlateFeatureValues values = new PlateFeatureValues(geometry); - values.setForWellLocation(dataSetID + 10 * i + 0.5f, 1, 1); - values.setForWellLocation(dataSetID + 10 * i - 0.5f, 1, 2); - will(returnValue(Arrays.asList(new ImgFeatureValuesDTO(0.0, 0.0, - values, def.getId())))); - } - } - one(dao).listFeatureDefsByDataSetId(dataSetID); - will(returnValue(defs)); - - one(dao).listFeatureVocabularyTermsByDataSetId(dataSetID); - will(returnValue(new ArrayList<ImgFeatureVocabularyTermDTO>())); - - one(dao).getContainerById(containerId); - String samplePermID = "s" + containerId; - will(returnValue(new ImgContainerDTO(samplePermID, geometry.getNumberOfRows(), - geometry.getNumberOfColumns(), EXPERIMENT_ID))); - - one(service).tryGetSampleIdentifier(samplePermID); - will(returnValue(new SampleIdentifier(new SpaceIdentifier("db", "s"), "S" - + dataSetID))); - } - }); - } - - private void assertFeatureTableRow(String expectedDataSetCode, String expectedWell, - String expectedPlate, String expectedValues, FeatureTableRow row) - { - assertEquals(expectedDataSetCode, row.getDataSetCode()); - String rowLetter = - PlateUtils.translateRowNumberIntoLetterCode(row.getWellLocation().getRow()); - assertEquals(expectedWell, rowLetter + row.getWellLocation().getColumn()); - assertEquals(expectedPlate, row.getPlateIdentifier().toString()); - assertEquals(expectedValues, render(row.getFeatureValues())); - } - - private String render(FeatureValue[] values) - { - StringBuilder builder = new StringBuilder(); - for (FeatureValue value : values) - { - if (builder.length() > 0) - { - builder.append(", "); - } - builder.append(value); - } - return builder.toString(); - } - - private FeatureVectorLoader createBuilder(String... featureCodes) - { - return new FeatureVectorLoader(Arrays.asList(featureCodes), dao, service); - } - -} diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoaderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoaderTest.java new file mode 100644 index 0000000000000000000000000000000000000000..4b976ab973a02703c20bca8945d00e8c5a05e95e --- /dev/null +++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureVectorLoaderTest.java @@ -0,0 +1,344 @@ +/* + * Copyright 2010 ETH Zuerich, CISD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package ch.systemsx.cisd.openbis.plugin.screening.shared.imaging; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.jmock.Expectations; +import org.jmock.Mockery; +import org.testng.AssertJUnit; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import ch.systemsx.cisd.openbis.dss.generic.shared.utils.CodeAndLabelUtil; +import ch.systemsx.cisd.openbis.generic.shared.basic.dto.CodeAndLabel; +import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier; +import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SpaceIdentifier; +import ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto.Geometry; +import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.PlateUtils; +import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.FeatureValue; +import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.FeatureTableRow; +import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateFeatureValues; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.FeatureVectorLoader.IMetadataProvider; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.IImagingReadonlyQueryDAO; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgContainerDTO; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureDefDTO; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureValuesDTO; +import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureVocabularyTermDTO; + +/** + * Tests of {@link FeatureVectorLoader}. + * + * @author Franz-Josef Elmer + */ +public class FeatureVectorLoaderTest extends AssertJUnit +{ + private static final int EXPERIMENT_ID = 42; + + private static final String DATA_SET_CODE1 = "ds1"; + + private static final String DATA_SET_CODE2 = "ds2"; + + private static final String DATA_SET_CODE3 = "ds3"; + + private Mockery context; + + private IMetadataProvider service; + + private IImagingReadonlyQueryDAO dao; + + @BeforeMethod + public void beforeMethod() + { + context = new Mockery(); + service = context.mock(IMetadataProvider.class); + dao = context.mock(IImagingReadonlyQueryDAO.class); + } + + @AfterMethod + public final void tearDown() + { + // The following line of code should also be called at the end of each test method. + // Otherwise one does not know which test failed.
+ context.assertIsSatisfied(); + } + + @Test + public void testNoFiltering() + { + long[] dataSetIDs = new long[] + { 1, 2, 3 }; + String[][] featureCodesPerDataset = new String[][] + { + { "<A>a", "<B>b" }, + { "<B>beta", "c" }, + { "<B>b" } }; + prepareLoadFeatures(dataSetIDs, null, featureCodesPerDataset); + + FeatureVectorLoader builder = createBuilder(); + builder.addFeatureVectorsOfDataSetsOrDie(Arrays.asList(DATA_SET_CODE1, DATA_SET_CODE2, + DATA_SET_CODE3)); + List<CodeAndLabel> codesAndLabels = builder.getCodesAndLabels(); + List<FeatureTableRow> rows = builder.createFeatureTableRows(); + + assertEquals("[<A> a, <B> b, <B> beta, <C> c]", codesAndLabels.toString()); + assertFeatureTableRow(DATA_SET_CODE1, "A1", "db:/s/S1", "1.5, 11.5, NaN, NaN", rows.get(0)); + assertFeatureTableRow(DATA_SET_CODE1, "A2", "db:/s/S1", "0.5, 10.5, NaN, NaN", rows.get(1)); + assertFeatureTableRow(DATA_SET_CODE2, "A1", "db:/s/S2", "NaN, NaN, 2.5, 12.5", rows.get(2)); + assertFeatureTableRow(DATA_SET_CODE2, "A2", "db:/s/S2", "NaN, NaN, 1.5, 11.5", rows.get(3)); + assertFeatureTableRow(DATA_SET_CODE3, "A1", "db:/s/S3", "NaN, 3.5, NaN, NaN", rows.get(4)); + assertFeatureTableRow(DATA_SET_CODE3, "A2", "db:/s/S3", "NaN, 2.5, NaN, NaN", rows.get(5)); + assertEquals(6, rows.size()); + context.assertIsSatisfied(); + } + + @Test + public void testFiltering() + { + long[] dataSetIDs = new long[] + { 1, 2, 3 }; + String[][] featureCodesPerDataset = new String[][] + { + { "<A>a", "b" }, + { "<B>beta", "c" }, + { "b" } }; + prepareLoadFeatures(dataSetIDs, "B", featureCodesPerDataset); + + FeatureVectorLoader builder = createBuilder("B"); + builder.addFeatureVectorsOfDataSetsOrDie(Arrays.asList(DATA_SET_CODE1, DATA_SET_CODE2, + DATA_SET_CODE3)); + List<CodeAndLabel> codesAndLabels = builder.getCodesAndLabels(); + List<FeatureTableRow> rows = builder.createFeatureTableRows(); + + assertEquals("[<B> b, <B> beta]", codesAndLabels.toString()); + assertFeatureTableRow(DATA_SET_CODE1, "A1", "db:/s/S1", "11.5, NaN", rows.get(0)); + assertFeatureTableRow(DATA_SET_CODE1, "A2", "db:/s/S1", "10.5, NaN", rows.get(1)); + assertFeatureTableRow(DATA_SET_CODE2, "A1", "db:/s/S2", "NaN, 2.5", rows.get(2)); + assertFeatureTableRow(DATA_SET_CODE2, "A2", "db:/s/S2", "NaN, 1.5", rows.get(3)); + assertFeatureTableRow(DATA_SET_CODE3, "A1", "db:/s/S3", "3.5, NaN", rows.get(4)); + assertFeatureTableRow(DATA_SET_CODE3, "A2", "db:/s/S3", "2.5, NaN", rows.get(5)); + assertEquals(6, rows.size()); + context.assertIsSatisfied(); + } + + private void prepareLoadFeatures(long[] dataSetIDs, String filteredCodeOrNull, + String[][] featureCodesPerDataset) + { + Geometry geometry = Geometry.createFromCartesianDimensions(2, 1); + prepareListDatasets(dataSetIDs); + prepareListContainers(dataSetIDs, geometry); + prepareGetFeatureDefinitions(dataSetIDs, featureCodesPerDataset); + prepareGetFeatureVocabularyTerms(dataSetIDs); + prepareAddFeatureVectors(dataSetIDs, filteredCodeOrNull, geometry, featureCodesPerDataset); + } + + private void prepareAddFeatureVectors(final long dataSetIDs[], final String filteredCodeOrNull, + final Geometry geometry, final String[]... 
featureHeadersPerDataset) + { + context.checking(new Expectations() + { + { + List<ImgFeatureValuesDTO> values = new ArrayList<ImgFeatureValuesDTO>(); + List<Long> listedFeatureDefIds = new ArrayList<Long>(); + long featureId = 0; + int datasetIx = 0; + + for (String[] featureHeaders : featureHeadersPerDataset) + { + long dataSetId = dataSetIDs[datasetIx++]; + long datasetFeatureIx = 0; + for (String featureHeader : featureHeaders) + { + String featureCode = CodeAndLabelUtil.create(featureHeader).getCode(); + if (filteredCodeOrNull == null + || filteredCodeOrNull.equals(featureCode)) + { + listedFeatureDefIds.add(featureId); + + PlateFeatureValues matrixValues = new PlateFeatureValues(geometry); + matrixValues.setForWellLocation(dataSetId + 10 * datasetFeatureIx + + 0.5f, 1, 1); + matrixValues.setForWellLocation(dataSetId + 10 * datasetFeatureIx + - 0.5f, 1, 2); + + ImgFeatureValuesDTO value = + new ImgFeatureValuesDTO(0.0, 0.0, matrixValues, featureId); + values.add(value); + } + featureId++; + datasetFeatureIx++; + } + } + one(dao).getFeatureValues(asPrimitiveArray(listedFeatureDefIds)); + will(returnValue(values)); + } + + }); + } + + private static long[] asPrimitiveArray(List<Long> values) + { + long[] array = new long[values.size()]; + for (int i = 0; i < array.length; i++) + { + array[i] = values.get(i); + } + return array; + } + + private void prepareListContainers(final long[] dataSetIDs, final Geometry geometry) + { + context.checking(new Expectations() + { + { + long[] containerIds = new long[dataSetIDs.length]; + List<ImgContainerDTO> containers = new ArrayList<ImgContainerDTO>(); + + for (int i = 0; i < dataSetIDs.length; i++) + { + long id = dataSetIDs[i]; + containerIds[i] = getContainerId(id); + String samplePermID = "s" + containerIds[i]; + ImgContainerDTO container = + new ImgContainerDTO(samplePermID, geometry.getNumberOfRows(), + geometry.getNumberOfColumns(), EXPERIMENT_ID); + container.setId(containerIds[i]); + containers.add(container); + + one(service).tryGetSampleIdentifier(samplePermID); + will(returnValue(createSpaceIdentifier(id))); + } + + one(dao).listContainersByIds(containerIds); + will(returnValue(containers)); + } + + }); + } + + private void prepareListDatasets(final long[] dataSetIDs) + { + context.checking(new Expectations() + { + { + String[] permIDs = new String[dataSetIDs.length]; + List<ImgDatasetDTO> dataSets = new ArrayList<ImgDatasetDTO>(); + + for (int i = 0; i < dataSetIDs.length; i++) + { + long id = dataSetIDs[i]; + permIDs[i] = "ds" + id; + + ImgDatasetDTO dataSet = + new ImgDatasetDTO(permIDs[i], null, null, getContainerId(id), false); + dataSet.setId(id); + dataSets.add(dataSet); + } + + one(dao).listDatasetsByPermId(permIDs); + will(returnValue(dataSets)); + } + }); + } + + private void prepareGetFeatureDefinitions(final long[] dataSetIDs, + final String[]... 
featureCodesPerDataset) + { + context.checking(new Expectations() + { + { + List<ImgFeatureDefDTO> defs = new ArrayList<ImgFeatureDefDTO>(); + int datasetIx = 0; + int featureId = 0; + for (String[] featureCodes : featureCodesPerDataset) + { + long dataSetID = dataSetIDs[datasetIx]; + for (String featureCode : featureCodes) + { + CodeAndLabel codeAndTitle = CodeAndLabelUtil.create(featureCode); + String title = codeAndTitle.getLabel(); + String code = codeAndTitle.getCode(); + ImgFeatureDefDTO def = + new ImgFeatureDefDTO(title, code, title, dataSetID); + def.setId(featureId++); + defs.add(def); + } + datasetIx++; + } + + one(dao).listFeatureDefsByDataSetIds(dataSetIDs); + will(returnValue(defs)); + } + }); + } + + private void prepareGetFeatureVocabularyTerms(final long[] dataSetIDs) + { + context.checking(new Expectations() + { + { + one(dao).listFeatureVocabularyTermsByDataSetId(dataSetIDs); + will(returnValue(new ArrayList<ImgFeatureVocabularyTermDTO>())); + } + }); + } + + private static long getContainerId(long datasetId) + { + return datasetId + 100; + } + + private SampleIdentifier createSpaceIdentifier(long datasetId) + { + return new SampleIdentifier(new SpaceIdentifier("db", "s"), "S" + datasetId); + } + + private void assertFeatureTableRow(String expectedDataSetCode, String expectedWell, + String expectedPlate, String expectedValues, FeatureTableRow row) + { + assertEquals(expectedDataSetCode, row.getDataSetCode()); + String rowLetter = + PlateUtils.translateRowNumberIntoLetterCode(row.getWellLocation().getRow()); + assertEquals(expectedWell, rowLetter + row.getWellLocation().getColumn()); + assertEquals(expectedPlate, row.getPlateIdentifier().toString()); + assertEquals(expectedValues, render(row.getFeatureValues())); + } + + private String render(FeatureValue[] values) + { + StringBuilder builder = new StringBuilder(); + for (FeatureValue value : values) + { + if (builder.length() > 0) + { + builder.append(", "); + } + builder.append(value); + } + return builder.toString(); + } + + private FeatureVectorLoader createBuilder(String... featureCodes) + { + return new FeatureVectorLoader(Arrays.asList(featureCodes), dao, service); + } + +}