Skip to content
Snippets Groups Projects
Commit f93c8cca authored by tpylak's avatar tpylak
Browse files

LMS-2305 iBrain dropboxes have been moved to...

parent ef30f29d
No related branches found
No related tags found
No related merge requests found
Showing
with 0 additions and 681 deletions
#! /usr/bin/env python
import os
from java.io import File
class AbstractPropertiesParser:
    """Base class for parsers of java-style properties files.

    The file is parsed eagerly in the constructor; afterwards the values are
    served from an in-memory dictionary.
    """
    _propertiesDict = None

    def __init__(self, incoming, fileName):
        path = os.path.join(incoming, fileName)
        self._propertiesDict = self._parseMetadata(path)

    def _findFile(self, incoming, prefix):
        """Returns: name of the first file in 'incoming' whose name starts
        with 'prefix'. Raises an exception when no such file exists."""
        for fileName in os.listdir(incoming):
            if fileName.startswith(prefix):
                return fileName
        raise Exception("No file with prefix '"+prefix+"' has been found!")

    def _parseMetadata(self, path):
        """Parses the metadata file from the given incoming directory.
        Each line should have a form:
            key = value
        Keys should be unique in the file.
        Returns:
            a dictionary with keys and values from the file.
        """
        f = open(path)
        try:
            myDict = {}
            for line in f:
                line = line.strip()
                # skip blank lines and comments
                if len(line) == 0 or line.startswith("#"):
                    continue
                ix = line.find("=")
                if ix == -1:
                    raise Exception("Cannot find '=' in line '"+line+"' in file: "+path)
                key = line[:ix].strip()
                value = line[ix+1:].strip()
                if key in myDict:
                    raise Exception("Duplicated key '"+key+"' in file: "+path)
                myDict[key] = value
            return myDict
        finally:
            # always release the file handle, also when parsing fails
            f.close()

    def get(self, propertyName):
        """Returns the value of the property; raises KeyError when unknown."""
        return self._propertiesDict[propertyName]

    # All properties in the file.
    # Returns:
    #   a list of (propertyName, propertyValue) pairs
    def getPropertiesIter(self):
        # items() instead of iteritems(): identical result, portable
        return [ (key, value) for key, value in self._propertiesDict.items() ]

    # All dataset properties (keys starting with the prefix defined by the subclass).
    # Returns:
    #   a list of (propertyCode, propertyValue) pairs
    def getDatasetPropertiesIter(self):
        return [ (key, value) for key, value in self._propertiesDict.items() if key.startswith(self.DATASET_PROPERTY_PREFIX) ]
class AbstractMetadataParser(AbstractPropertiesParser):
    """Parses the 'metadata.properties' file which accompanies every incoming
    iBrain2 dataset."""
    METADATA_FILE = "metadata.properties"

    IBRAIN2_DATASET_ID_PROPERTY = "ibrain2.dataset.id"
    DATASET_PROPERTY_PREFIX = "ibrain2."
    DATASET_TYPE_PROPERTY = "dataset.type"

    def __init__(self, incoming):
        AbstractPropertiesParser.__init__(self, incoming, self.METADATA_FILE)

    def getIBrain2DatasetId(self):
        """Returns the iBrain2-side id of the dataset."""
        return self.get(self.IBRAIN2_DATASET_ID_PROPERTY)

    def getDatasetType(self):
        """Returns the openBIS dataset type code of the dataset."""
        return self.get(self.DATASET_TYPE_PROPERTY)
# --- concrete parser classes ----------------------
class AcquiredDatasetMetadataParser(AbstractMetadataParser):
    """Parser for metadata of datasets acquired directly on an instrument."""
    PLATE_CODE_PROPERTY = "barcode"
    # backward-compatible alias kept because of the historical typo in the name
    PLATE_CODE_PRPOPERTY = PLATE_CODE_PROPERTY
    INSTRUMENT_PROPERTY = "instrument.id"
    TIMESTAMP_PROPERTY = "timestamp"

    # All dataset properties.
    # Returns:
    #   a list of (propertyCode, propertyValue) pairs
    def getDatasetPropertiesIter(self):
        properties = AbstractPropertiesParser.getDatasetPropertiesIter(self)
        # the assay id is handled separately, so it is filtered out here
        properties = [ (key, value) for (key, value) in properties if key != "ibrain2.assay.id" ]
        properties.append((self.INSTRUMENT_PROPERTY, self.get(self.INSTRUMENT_PROPERTY)))
        properties.append((self.TIMESTAMP_PROPERTY, self.get(self.TIMESTAMP_PROPERTY)))
        return properties

    def getPlateCode(self):
        """Returns the barcode of the plate the images were acquired from."""
        return self.get(self.PLATE_CODE_PROPERTY)
class DerivedDatasetMetadataParser(AbstractMetadataParser):
    """Parser for metadata of datasets derived from other datasets (e.g.
    analysis results). Additionally extracts the workflow name and author from
    the name of the 'workflow_*' file found in the incoming directory."""
    WORKFLOW_FILE_PREFIX = "workflow_"
    PARENT_DATASET_PERMID_PROPERTY = "storage_provider.parent.dataset.id"
    # backward-compatible alias kept because of the historical typo in the name
    PARENT_DATSASET_PERMID_PRPOPERTY = PARENT_DATASET_PERMID_PROPERTY
    DATASET_TYPE_PROPERTY = "dataset.type"
    WORKFLOW_NAME_PROPERTY = "ibrain2.workflow.name"
    WORKFLOW_AUTHOR_PROPERTY = "ibrain2.workflow.author"

    _workflowName = None
    _workflowAuthor = None

    def __init__(self, incoming):
        AbstractMetadataParser.__init__(self, incoming)
        workflowFile = self._findFile(incoming, self.WORKFLOW_FILE_PREFIX)
        # expected file name shape: workflow_<name>_<author>[.<ext>]
        basename = os.path.splitext(workflowFile)[0]
        tokens = basename.split("_")
        if len(tokens) < 3:
            raise Exception("Cannot parse workflow name and author from: "+workflowFile)
        self._workflowName = tokens[1]
        self._workflowAuthor = tokens[2]

    # All dataset properties, extended with the workflow name and author.
    # Returns:
    #   a list of (propertyCode, propertyValue) pairs
    def getDatasetPropertiesIter(self):
        properties = AbstractMetadataParser.getDatasetPropertiesIter(self)
        properties.append((self.WORKFLOW_NAME_PROPERTY, self._workflowName))
        properties.append((self.WORKFLOW_AUTHOR_PROPERTY, self._workflowAuthor))
        return properties

    def getParentDatasetPermId(self):
        """Returns the openBIS perm id of the dataset this one was derived from."""
        return self.get(self.PARENT_DATASET_PERMID_PROPERTY)

    def getDatasetType(self):
        # redundant override: behaves exactly like the inherited implementation
        return self.get(self.DATASET_TYPE_PROPERTY)
class AssayParser(AbstractPropertiesParser):
    """Parses the 'assay_*' properties file describing the assay of an
    incoming dataset."""
    ASSAY_FILE_PREFIX = "assay_"

    ASSAY_ID_PROPERTY = "assay.id"
    ASSAY_TYPE_PROPERTY = "assay.type"
    ASSAY_DESC_PROPERTY = "assay.description"
    LAB_LEADER_PROPERTY = "labinfo.pi"
    EXPERIMENTER_PROPERTY = "experimenter.login"
    WORKFLOW_NAME_PROPERTY = "workflow.name"
    WORKFLOW_AUTHOR_PROPERTY = "workflow.author"

    def __init__(self, incoming):
        assayFileName = self._findFile(incoming, self.ASSAY_FILE_PREFIX)
        AbstractPropertiesParser.__init__(self, incoming, assayFileName)
class RegistrationConfirmationUtils:
    """Creates the confirmation files which tell iBrain2 whether the
    registration of a dataset succeeded or failed."""
    # path to the registration confirmation directory relative to the incoming dataset
    CONFIRMATION_DIRECTORY = "registration-status"

    STATUS_PROPERTY = "storage_provider.storage.status"
    STATUS_OK = "STORAGE_SUCCESSFUL"
    STATUS_ERROR = "STORAGE_FAILED"
    ERROR_MSG_PROPERTY = "storage_provider.message"
    OPENBIS_DATASET_ID_PROPERTY = "storage_provider.dataset.id"
    IBRAIN2_STATUS_FILE_PREFIX = "ibrain2_dataset_id_"
    IBRAIN2_STATUS_FILE_SUFFIX = ".properties"

    def _getDestinationDir(self, incoming):
        # Requires Jython: java.io.File is used to walk 3 directory levels up
        # from the incoming dataset.
        return File(incoming).getParentFile().getParentFile().getParent() + "/" + self.CONFIRMATION_DIRECTORY

    def _getConfirmationFileName(self, ibrain2DatasetId):
        return self.IBRAIN2_STATUS_FILE_PREFIX + ibrain2DatasetId + self.IBRAIN2_STATUS_FILE_SUFFIX

    def _getStatusFilePath(self, ibrain2DatasetId, incoming):
        return self._getDestinationDir(incoming) + "/" + self._getConfirmationFileName(ibrain2DatasetId)

    def _prop(self, name, value):
        """Formats one 'name = value' line of a properties file."""
        return name + " = " + value + "\n"

    def _writeConfirmationFile(self, ibrain2DatasetId, fileContent, incoming):
        confirmationFile = self._getStatusFilePath(ibrain2DatasetId, incoming)
        self._writeFile(confirmationFile, fileContent)

    def _writeFile(self, file, fileContent):
        # 'file' is the destination path; use a separate name for the handle so
        # the handle is always closed, also when the write fails
        out = open(file, "w")
        try:
            out.write(fileContent)
        finally:
            out.close()

    def createSuccessStatus(self, ibrain2DatasetId, openbisDatasetId, incoming):
        """Writes a confirmation file reporting successful storage of the dataset."""
        fileContent = self._prop(self.STATUS_PROPERTY, self.STATUS_OK)
        fileContent += self._prop(AbstractMetadataParser.IBRAIN2_DATASET_ID_PROPERTY, ibrain2DatasetId)
        fileContent += self._prop(self.OPENBIS_DATASET_ID_PROPERTY, openbisDatasetId)
        self._writeConfirmationFile(ibrain2DatasetId, fileContent, incoming)

    def createFailureStatus(self, ibrain2DatasetId, errorMessage, incoming):
        """Writes a confirmation file reporting failed storage with the error message."""
        fileContent = self._prop(self.STATUS_PROPERTY, self.STATUS_ERROR)
        fileContent += self._prop(AbstractMetadataParser.IBRAIN2_DATASET_ID_PROPERTY, ibrain2DatasetId)
        fileContent += self._prop(self.ERROR_MSG_PROPERTY, errorMessage)
        self._writeConfirmationFile(ibrain2DatasetId, fileContent, incoming)
# --------------
def setImageDatasetPropertiesAndRegister(imageDataset, metadataParser, incoming, service, factory):
    """Creates registration details for the image dataset, copies all dataset
    properties from the metadata onto it, registers the dataset (connected to
    its parent) and writes the iBrain2 success confirmation when the
    transaction commits."""
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    imageRegistrationDetails = factory.createImageRegistrationDetails(imageDataset, incoming)
    for propertyCode, value in metadataParser.getDatasetPropertiesIter():
        imageRegistrationDetails.setPropertyValue(propertyCode, value)
    tr = service.transaction(incoming, factory)
    dataset = tr.createNewDataSet(imageRegistrationDetails)
    dataset.setParentDatasets([metadataParser.getParentDatasetPermId()])
    # the return value (the destination folder) is not needed here
    tr.moveFile(incoming.getPath(), dataset)
    if tr.commit():
        createSuccessStatus(iBrain2DatasetId, dataset, incoming.getPath())
def registerDerivedBlackBoxDataset(state, service, factory, incoming, metadataParser, datasetType, fileFormatType):
    """Registers a derived dataset of the given type and file format without
    any type-specific processing of its content."""
    tx = service.transaction(incoming, factory)
    newDataset = tx.createNewDataSet()
    newDataset.setDataSetType(datasetType)
    newDataset.setFileFormatType(fileFormatType)
    registerDerivedDataset(state, tx, newDataset, incoming, metadataParser)
def registerDerivedDataset(state, transaction, dataset, incoming, metadataParser):
    """Connects the dataset to the plate of its parent dataset, copies all
    metadata properties, links the parent, moves the incoming files and writes
    the iBrain2 success confirmation when the transaction commits."""
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    openbisDatasetParent = metadataParser.getParentDatasetPermId()
    (space, plate) = tryGetConnectedPlate(state, openbisDatasetParent, iBrain2DatasetId, incoming.getPath())
    if plate is None:
        # failure status has already been reported by tryGetConnectedPlate
        return
    dataset.setSample(transaction.getSample('/'+space+'/'+plate))
    dataset.setMeasuredData(False)
    for propertyCode, value in metadataParser.getDatasetPropertiesIter():
        dataset.setPropertyValue(propertyCode, value)
    # reuse the perm id fetched above instead of parsing it a second time
    dataset.setParentDatasets([openbisDatasetParent])
    transaction.moveFile(incoming.getPath(), dataset)
    if transaction.commit():
        createSuccessStatus(iBrain2DatasetId, dataset, incoming.getPath())
def findCSVFile(dir):
    """Returns the path of the first file in 'dir' with a '.csv' extension.
    Raises an exception when there is none."""
    for fileName in os.listdir(dir):
        if fileName.endswith(".csv"):
            # '/' is used on purpose: these paths are consumed by the
            # (unix/Jython) storage provider
            return dir + "/" + fileName
    raise Exception("No CSV file has been found in "+dir)
"""
Returns:
(plateSpace, plateCode) tuple for the plate connected with the specified dataset
or (None, None) if the dataset does not exist or is not connected to the plate.
"""
def tryGetConnectedPlate(state, openbisDatasetId, iBrain2DatasetId, incomingPath):
openbis = state.getOpenBisService()
dataset = openbis.tryGetDataSet(openbisDatasetId)
if dataset != None:
plate = dataset.getSample()
if plate != None:
return (plate.getSpace().getCode(), plate.getCode())
else:
errorMsg = "No plate is connected to the dataset: "+openbisDatasetId+"."
else:
errorMsg = "Dataset does not exist or is not accessible: "+openbisDatasetId+". Maybe the dataset has not been registered yet. Try again later."
print errorMsg
RegistrationConfirmationUtils().createFailureStatus(iBrain2DatasetId, errorMsg, incomingPath)
return (None, None)
def createSuccessStatus(iBrain2DatasetId, dataset, incomingPath):
    """Writes the iBrain2 confirmation file reporting successful registration
    of the given openBIS dataset."""
    RegistrationConfirmationUtils().createSuccessStatus(iBrain2DatasetId, dataset.getDataSetCode(), incomingPath)
def createFailureStatus(iBrain2DatasetId, throwable, incoming):
    """Writes the iBrain2 confirmation file reporting a failed registration.
    Uses the message of the (java) throwable, falling back to its string form
    when no message is set."""
    msg = throwable.getMessage()
    if msg is None:
        msg = throwable.toString()
    RegistrationConfirmationUtils().createFailureStatus(iBrain2DatasetId, msg, incoming.getPath())
# -------------- TODO: remove tests
TEST_DIR = "/Users/tpylak/main/src/screening-demo/biozentrum/dropboxes/ibrain2-dropboxes-test"
def testMetadataParsers():
print "-- acquired ---------------------------------"
parser = AcquiredDatasetMetadataParser(TEST_DIR+"/HCS_IMAGE_RAW/ibrain2_dataset_id_32")
print "dataset type:", parser.getDatasetType()
print "plate:", parser.getPlateCode()
print "properties"
for key, value in parser.getDatasetPropertiesIter():
print key, value
print "\n-- derived ---------------------------------"
parser = DerivedDatasetMetadataParser(TEST_DIR+"/HCS_IMAGE_OVERVIEW/ibrain2_dataset_id_48")
print "dataset type:", parser.getDatasetType()
print "parent perm id:", parser.getParentDatasetPermId()
print "properties"
for key, value in parser.getDatasetPropertiesIter():
print key, value
def testAssayParsers():
print "-- assay ---------------------------------"
parser = AssayParser(TEST_DIR+"/HCS_IMAGE_RAW/ibrain2_dataset_id_32")
print "properties"
for key, value in parser.getPropertiesIter():
print key, value
def testConfirmationFiles():
IBRAIN2Utils().createSuccessStatus("123", "123123123123-12312", TEST_DIR+"/HCS_IMAGE_RAW/ibrain2_dataset_id_32")
IBRAIN2Utils().createFailureStatus("321", "Global catastrophy!", TEST_DIR+"/HCS_IMAGE_RAW/ibrain2_dataset_id_32")
#testAssayParsers()
#testMetadataParsers()
#testConfirmationFiles()
#! /usr/bin/env python
import os
from ch.systemsx.cisd.openbis.dss.etl.dto.api.v1 import *
from ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto import Geometry
class IBrain2ImageDataSetConfig(SimpleImageDataConfig):
    """Extraction of image metadata (well, tile, channel) from iBrain2 image
    file names and heuristic computation of the tile geometry.

    File names are expected to consist of '_'-separated tokens whose first
    character encodes the meaning: 'w' well, 's' tile/site number, 'c' channel.
    """
    # maximal width and height of generated thumbnails, in pixels
    THUMBANAIL_SIZE = 200

    def extractImageMetadata(self, imagePath):
        """Returns an ImageMetadata object with well, tile number and channel
        code parsed from the image file name (extension stripped)."""
        basename = self.getFileBasename(imagePath)
        # map: first character of each '_'-separated token -> rest of the token
        token_dict = {}
        for token in basename.split("_"):
            token_dict[token[:1]] = token[1:]
        image_tokens = ImageMetadata()
        image_tokens.well = token_dict["w"]
        image_tokens.tileNumber = self.fieldAsInt(token_dict["s"], basename)
        image_tokens.channelCode = self.extractChannelCode(token_dict, basename)
        return image_tokens

    def extractChannelCode(self, token_dict, basename):
        # overridden in subclasses which derive the channel code differently
        return token_dict["c"]

    def getFileBasename(self, filePath):
        # NOTE(review): only the extension is stripped, any directory part is
        # kept -- presumably callers pass bare file names; verify.
        return os.path.splitext(filePath)[0]

    def fieldAsInt(self, fieldText, basename):
        """Parses 'fieldText' as an integer; raises an exception mentioning the
        file name when the text is not a number."""
        try:
            return int(fieldText)
        except ValueError:
            raise Exception("Cannot parse field number from '" + fieldText + "' in '" + basename + "' file name.")

    def geom(self, row, col):
        # shortcut for creating an openBIS Geometry object
        return Geometry.createFromRowColDimensions(row, col)

    def getTileGeometry(self, imageTokens, maxTileNumber):
        """
        Parameters:
            image_tokens_list - list of ImageTokens
        Returns: (rows, columns) tuple describing the matrix of tiles (aka fields or sides) in the well
        Derived heuristically from the maximal tile number observed.
        """
        # if a number of tiles is strange, assume that one tile is missing
        if maxTileNumber == 5 or maxTileNumber == 7 or maxTileNumber == 11 or maxTileNumber == 13:
            maxTileNumber = maxTileNumber + 1
        if maxTileNumber % 4 == 0 and maxTileNumber != 4:
            return self.geom(4, maxTileNumber / 4) # (4,2), (4,4)
        elif maxTileNumber % 3 == 0:
            return self.geom(maxTileNumber / 3, 3) # (3,3), (4,3), (5,3)
        elif maxTileNumber % 2 == 0:
            return self.geom(maxTileNumber / 2, 2) # (2,2), (3,2), (5,2), (7,2)
        else:
            return self.geom(maxTileNumber, 1)
class IBrain2SegmentationImageDataSetConfig(IBrain2ImageDataSetConfig):
    """Image dataset config for segmentation (overlay) images: the channel
    code combines the last file-name token with the original channel token."""
    def extractChannelCode(self, token_dict, basename):
        suffix = basename.split("_")[-1]
        return "%s (%s)" % (suffix, token_dict["c"])
\ No newline at end of file
#! /usr/bin/env python
# Dropbox for derived HCS analysis datasets: cell features in CSV format.
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None

def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incoming.getPath())
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    commonDropbox.registerDerivedBlackBoxDataset(state, service, factory, incoming, metadataParser, 'HCS_ANALYSIS_CELL_FEATURES_CSV', 'CSV')
\ No newline at end of file
#! /usr/bin/env python
# Dropbox for derived HCS analysis datasets: cell features in MAT format.
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None

def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incoming.getPath())
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    commonDropbox.registerDerivedBlackBoxDataset(state, service, factory, incoming, metadataParser, 'HCS_ANALYSIS_CELL_FEATURES_MAT', 'MAT')
#! /usr/bin/env python
# This is an example Jython dropbox for importing feature vectors coming from analysis of image datasets
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
from ch.systemsx.cisd.openbis.dss.etl.dto.api.v1 import *
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None
# token separator used in the feature CSV files
SEPARATOR = ","
# Specific code which defines the feature vector values for the dataset.
# Usually you will parse the content of the incoming file or directory to get the values.
def defineFeatures(incomingCsvFile):
    """Parses the incoming CSV file and builds a FeaturesBuilder from it.

    The file consists of sections: a header line whose first token is the
    feature code, followed by value rows; a row with an empty first token ends
    the section and the next line is treated as the next header.
    Parameters
        incomingCsvPath: path which points to the incoming CSV file
    """
    featuresBuilder = factory.createFeaturesBuilder()
    f = open(incomingCsvFile)
    try:
        for header in f:
            headerTokens = header.split(SEPARATOR)
            featureCode = headerTokens[0]
            featureValues = featuresBuilder.defineFeature(featureCode)
            # consume the value rows of this section; an empty row label marks its end
            for rowValues in f:
                rowTokens = rowValues.split(SEPARATOR)
                rowLabel = rowTokens[0].strip()
                if len(rowLabel) == 0:
                    break
                for column in range(1, len(headerTokens)):
                    value = rowTokens[column].strip()
                    # well code: row label + column number -- presumably e.g. "A1"
                    well = rowLabel + str(column)
                    featureValues.addValue(well, value)
    finally:
        # always release the file handle, also on parsing errors
        f.close()
    return featuresBuilder
def register(incomingPath):
    """Builds the feature vectors from the CSV file in the incoming directory
    and registers them as an 'HCS_ANALYSIS_WELL_FEATURES' dataset derived from
    the parent image dataset."""
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incomingPath)
    global iBrain2DatasetId
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    transaction = service.transaction(incoming, factory)
    incomingCsvFile = commonDropbox.findCSVFile(incomingPath)
    featuresBuilder = defineFeatures(incomingCsvFile)
    analysisRegistrationDetails = factory.createFeatureVectorRegistrationDetails(featuresBuilder, incoming)
    dataset = transaction.createNewDataSet(analysisRegistrationDetails)
    dataset.setDataSetType('HCS_ANALYSIS_WELL_FEATURES')
    dataset.setFileFormatType('CSV')
    commonDropbox.registerDerivedDataset(state, transaction, dataset, incoming, metadataParser)

def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    register(incoming.getPath())
\ No newline at end of file
#! /usr/bin/env python
# Dropbox for derived HCS well quality datasets (CSV).
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
import commonImageDropbox
import commonDropbox
from java.util import Properties
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None

# NOTE(review): this definition is dead code -- it is shadowed by the
# rollback_transaction defined further down in this file.
def rollback_transaction(service, transaction, algorithmRunner, throwable):
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)
def getConfigurationProperties():
    """Builds the java Properties which drive the parsing of the quality CSV."""
    settings = [
        ("separator", ","),
        ("well-name-row", "File_Name"),
        ("well-name-col", "File_Name"),
        ("well-name-col-is-alphanum", "true"),
    ]
    config = Properties()
    for key, value in settings:
        config.setProperty(key, value)
    return config
def register(incomingPath):
    """Registers the incoming CSV as an 'HCS_ANALYSIS_WELL_QUALITY' dataset
    derived from the parent image dataset."""
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incomingPath)
    global iBrain2DatasetId
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    transaction = service.transaction(incoming, factory)
    configProps = getConfigurationProperties()
    incomingCsvFile = commonDropbox.findCSVFile(incomingPath)
    # the factory parses the CSV itself, driven by the configuration properties
    analysisRegistrationDetails = factory.createFeatureVectorRegistrationDetails(incomingCsvFile, configProps)
    dataset = transaction.createNewDataSet(analysisRegistrationDetails)
    dataset.setDataSetType('HCS_ANALYSIS_WELL_QUALITY')
    dataset.setFileFormatType('CSV')
    commonDropbox.registerDerivedDataset(state, transaction, dataset, incoming, metadataParser)

def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    register(incoming.getPath())
\ No newline at end of file
#! /usr/bin/env python
# Dropbox for overview (JPG) images derived from raw HCS image datasets.
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None
def register(incomingPath):
    """Registers an overview (JPG/PNG/GIF) image dataset derived from a raw
    image dataset, connected to the plate of its parent."""
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incomingPath)
    global iBrain2DatasetId
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    openbisDatasetParent = metadataParser.getParentDatasetPermId()
    (space, plate) = commonDropbox.tryGetConnectedPlate(state, openbisDatasetParent, iBrain2DatasetId, incomingPath)
    if plate is None:
        # failure status has already been reported by tryGetConnectedPlate
        return
    imageDataset = commonImageDropbox.IBrain2ImageDataSetConfig()
    imageDataset.setOverviewImageDatasetType()
    imageDataset.setPlate(space, plate)
    imageDataset.setFileFormatType("JPG")
    imageDataset.setRecognizedImageExtensions(["jpg", "jpeg", "png", "gif"])
    imageDataset.setStoreChannelsOnExperimentLevel(False)
    imageDataset.setGenerateThumbnails(True)
    imageDataset.setMaxThumbnailWidthAndHeight(imageDataset.THUMBANAIL_SIZE)
    commonDropbox.setImageDatasetPropertiesAndRegister(imageDataset, metadataParser, incoming, service, factory)
def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    register(incoming.getPath())
#! /usr/bin/env python
# Dropbox for raw HCS image datasets acquired on an instrument.
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

""" sample type code of the plate, needed if a new sample is registered automatically """
PLATE_TYPE_CODE = "PLATE"
# experiment type used when an experiment has to be created on the fly
SIRNA_EXP_TYPE = "SIRNA_HCS"
# code of the sample property holding the plate geometry
PLATE_GEOMETRY_PROPERTY_CODE = "$PLATE_GEOMETRY"

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None
def createPlateWithExperimentIfNeeded(transaction, assayParser, plateCode, spaceCode, plateGeometry):
    """Returns the plate sample with the given code in the given space.
    The space, project, experiment and plate are created on the fly when they
    do not exist yet; experiment metadata is taken from the assay properties."""
    projectCode = assayParser.get(assayParser.EXPERIMENTER_PROPERTY)
    # renamed from 'experiment' -- the original reused one variable for the
    # experiment code string and the experiment object
    experimentCode = assayParser.get(assayParser.ASSAY_ID_PROPERTY)
    experimentDesc = assayParser.get(assayParser.ASSAY_DESC_PROPERTY)
    experimentType = assayParser.get(assayParser.ASSAY_TYPE_PROPERTY)

    if transaction.getSpace(spaceCode) is None:
        transaction.createNewSpace(spaceCode, None)
    sampleIdentifier = "/"+spaceCode+"/"+plateCode
    plate = transaction.getSample(sampleIdentifier)
    if plate is None:
        projectIdent = "/" + spaceCode + "/" + projectCode
        if transaction.getProject(projectIdent) is None:
            transaction.createNewProject(projectIdent)
        expIdentifier = projectIdent + "/" + experimentCode
        experiment = transaction.getExperiment(expIdentifier)
        if experiment is None:
            experiment = transaction.createNewExperiment(expIdentifier, SIRNA_EXP_TYPE)
            openbisExpDesc = experimentDesc + " (type: "+experimentType + ")"
            experiment.setPropertyValue("DESCRIPTION", openbisExpDesc)
        plate = transaction.createNewSample(sampleIdentifier, PLATE_TYPE_CODE)
        plate.setPropertyValue(PLATE_GEOMETRY_PROPERTY_CODE, plateGeometry)
        plate.setExperiment(experiment)
    return plate
def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)

# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    incomingPath = incoming.getPath()
    metadataParser = commonDropbox.AcquiredDatasetMetadataParser(incomingPath)
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    assayParser = commonDropbox.AssayParser(incomingPath)
    # configure recognition and thumbnailing of the raw TIFF images
    imageDataset = commonImageDropbox.IBrain2ImageDataSetConfig()
    imageDataset.setRawImageDatasetType()
    imageDataset.setFileFormatType("TIFF")
    imageDataset.setRecognizedImageExtensions(["tif", "tiff"])
    imageDataset.setStoreChannelsOnExperimentLevel(False)
    imageDataset.setGenerateThumbnails(True)
    imageDataset.setMaxThumbnailWidthAndHeight(imageDataset.THUMBANAIL_SIZE)
    imageRegistrationDetails = factory.createImageRegistrationDetails(imageDataset, incoming)
    # copy all dataset properties from the iBrain2 metadata file
    for propertyCode, value in metadataParser.getDatasetPropertiesIter():
        imageRegistrationDetails.setPropertyValue(propertyCode, value)
    tr = service.transaction(incoming, factory)
    # NOTE(review): 'plate' first holds the plate code (a string) and is then
    # rebound to the plate sample object a few lines below
    plate = metadataParser.getPlateCode()
    space = assayParser.get(assayParser.LAB_LEADER_PROPERTY)
    plateGeometry = factory.figureGeometry(imageRegistrationDetails)
    plate = createPlateWithExperimentIfNeeded(tr, assayParser, plate, space, plateGeometry)
    dataset = tr.createNewDataSet(imageRegistrationDetails)
    dataset.setSample(plate)
    imageDataSetFolder = tr.moveFile(incomingPath, dataset)
    if tr.commit():
        commonDropbox.createSuccessStatus(iBrain2DatasetId, dataset, incomingPath)
\ No newline at end of file
#! /usr/bin/env python
# This is a dropbox for importing HCS segmentation image datasets
# NOTE(review): 'incoming', 'state', 'service' and 'factory' are globals
# injected by the openBIS dropbox framework; they are not defined in this file.
from ch.systemsx.cisd.openbis.dss.etl.dto.api.v1 import *
import commonImageDropbox
import commonDropbox
# reload so that changes to the shared modules are picked up without a restart
reload(commonImageDropbox)
reload(commonDropbox)

# Global variable where we set the iBrain2 id of the dataset at the beginning,
# so that the rollback can use it as well.
iBrain2DatasetId = None

def rollback_service(service, throwable):
    # reports the failure back to iBrain2 via a confirmation file
    global iBrain2DatasetId
    commonDropbox.createFailureStatus(iBrain2DatasetId, throwable, incoming)

def rollback_transaction(service, transaction, algorithmRunner, throwable):
    rollback_service(service, throwable)
def register(incomingPath):
    """Registers a segmentation (overlay) image dataset derived from a raw
    image dataset; original data is stored in HDF5 and high quality
    thumbnails are generated."""
    metadataParser = commonDropbox.DerivedDatasetMetadataParser(incomingPath)
    global iBrain2DatasetId
    iBrain2DatasetId = metadataParser.getIBrain2DatasetId()
    openbisDatasetParent = metadataParser.getParentDatasetPermId()
    (space, plate) = commonDropbox.tryGetConnectedPlate(state, openbisDatasetParent, iBrain2DatasetId, incomingPath)
    if plate is None:
        # failure status has already been reported by tryGetConnectedPlate
        return
    imageDataset = commonImageDropbox.IBrain2SegmentationImageDataSetConfig()
    imageDataset.setSegmentationImageDatasetType()
    imageDataset.setPlate(space, plate)
    imageDataset.setFileFormatType("TIFF")
    imageDataset.setRecognizedImageExtensions(["tif"])
    imageDataset.setGenerateThumbnails(True)
    imageDataset.setMaxThumbnailWidthAndHeight(imageDataset.THUMBANAIL_SIZE)
    imageDataset.setGenerateHighQualityThumbnails(True)
    imageDataset.setStoreChannelsOnExperimentLevel(False)
    imageDataset.setOriginalDataStorageFormat(OriginalDataStorageFormat.HDF5)
    commonDropbox.setImageDatasetPropertiesAndRegister(imageDataset, metadataParser, incoming, service, factory)
# entry point, executed by the dropbox framework for each incoming directory
if incoming.isDirectory():
    register(incoming.getPath())
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment