Newer
Older
def new_dataset(self, type=None, files=None, props=None, folder=None, **kwargs):
    """ Creates a new dataset of a given sample type.

    :param type:   dataset type code (resolved case-insensitively); required
    :param files:  a filename or a list of filenames to register
    :param props:  dict of dataset properties
    :param folder: target folder inside the session workspace
    :raises ValueError: when no file or no dataset type is provided
    """
    if files is None:
        raise ValueError('please provide at least one file')
    elif isinstance(files, str):
        files = [files]
    if type is None:
        # fail early with a clear message instead of AttributeError on None.upper()
        raise ValueError('please provide a dataset type')
    type_obj = self.get_dataset_type(type.upper())
    return DataSet(self, type=type_obj, files=files, folder=folder, props=props, **kwargs)
def new_semantic_annotation(self, entityType=None, propertyType=None, **kwargs):
    """Create a new (unsaved) SemanticAnnotation for the given entity/property type."""
    return SemanticAnnotation(
        openbis_obj=self, isNew=True,
        entityType=entityType, propertyType=propertyType, **kwargs
    )
def _get_dss_url(self, dss_code=None):
    """ internal method to get the downloadURL of a datastore.

    :param dss_code: datastore code; when None, the first datastore's
                     downloadUrl is returned.
    NOTE(review): `get_datastores()` appears to return a DataFrame-like
    object (column indexing with a boolean mask) — confirm.
    """
    dss = self.get_datastores()
    if dss_code is None:
        return dss['downloadUrl'][0]
    else:
        return dss[dss['code'] == dss_code]['downloadUrl'][0]
Swen Vermeul
committed
def upload_files(self, datastore_url=None, files=None, folder=None, wait_until_finished=False):
    """Upload the given files to the session workspace of a datastore server.

    :param datastore_url: DSS base URL; resolved automatically when None
    :param files:  a filename or list of filenames/directories to upload
    :param folder: workspace folder; a timestamped folder is created when None
    :param wait_until_finished: block until every queued upload completed
    :returns: the list of file paths inside the session workspace
    :raises ValueError: when no files are given
    """
    if datastore_url is None:
        # NOTE(review): the pasted original garbled this guard's body;
        # resolving the default datastore URL here matches _get_dss_url's
        # purpose — confirm against upstream history.
        datastore_url = self._get_dss_url()
    if files is None:
        raise ValueError("Please provide a filename.")
    if folder is None:
        # create a unique foldername
        folder = time.strftime('%Y-%m-%d_%H-%M-%S')
    if isinstance(files, str):
        files = [files]
    self.files = files
    self.startByte = 0
    self.endByte = 0
    # define a queue to handle the upload threads
    queue = DataSetUploadQueue()
    real_files = []
    for filename in files:
        if os.path.isdir(filename):
            # recursively collect every file below the directory
            real_files.extend(
                [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(filename)) for f in fn])
        else:
            real_files.append(os.path.join(filename))
    # compose the upload-URL and put URL and filename in the upload queue
    for filename in real_files:
        file_in_wsp = os.path.join(folder, filename)
        self.files_in_wsp.append(file_in_wsp)
        upload_url = (
            datastore_url + '/session_workspace_file_upload'
            + '?filename=' + os.path.join(folder, quote(filename))
            + '&id=1'
            + '&startByte=0&endByte=0'
            + '&sessionID=' + self.token
        )
        queue.put([upload_url, filename, self.verify_certificates])
    # wait until all files have uploaded
    if wait_until_finished:
        queue.join()
    # return files with full path in session workspace
    return self.files_in_wsp
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
def __init__(self, workers=20):
    """Create the upload queue and start *workers* daemon consumer threads."""
    self.upload_queue = Queue()
    # spawn the worker pool; daemon threads die with the main process
    for _ in range(workers):
        worker = Thread(target=self.upload_file)
        worker.daemon = True
        worker.start()
def put(self, things):
    """ expects a list [url, filename] which is put into the upload queue
    """
    queue = self.upload_queue
    queue.put(things)
def join(self):
    """ needs to be called if you want to wait for all uploads to be finished
    """
    queue = self.upload_queue
    queue.join()
def upload_file(self):
    """Worker loop: pop (url, filename, verify) items off the queue and POST
    each file to the DSS session workspace. Runs forever in a daemon thread.
    """
    while True:
        # get the next item in the queue
        upload_url, filename, verify_certificates = self.upload_queue.get()
        try:
            filesize = os.path.getsize(filename)
            # upload the file to our DSS session workspace
            with open(filename, 'rb') as f:
                resp = requests.post(upload_url, data=f, verify=verify_certificates)
            resp.raise_for_status()
            data = resp.json()
            # `assert` is stripped under `python -O`; raise explicitly instead
            if filesize != int(data['size']):
                raise ValueError(
                    "size mismatch after upload of {}: local {} vs remote {}".format(
                        filename, filesize, data['size'])
                )
        finally:
            # always tell the queue we are done, otherwise queue.join() hangs
            # forever after any failed upload
            self.upload_queue.task_done()
Swen Vermeul
committed
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
def __init__(self, workers=20):
    """Create the download queue and start *workers* daemon consumer threads."""
    self.download_queue = Queue()
    # spawn the worker pool; daemon threads die with the main process
    for _ in range(workers):
        worker = Thread(target=self.download_file)
        worker.daemon = True
        worker.start()
def put(self, things):
    """ expects a list [url, filename] which is put into the download queue
    """
    queue = self.download_queue
    queue.put(things)
def join(self):
    """ needs to be called if you want to wait for all downloads to be finished
    """
    queue = self.download_queue
    queue.join()
def download_file(self):
    """Worker loop: pop (url, filename, file_size, verify) items off the queue
    and stream each remote file to disk, verifying its size afterwards.
    Runs forever in a daemon thread.
    """
    while True:
        url, filename, file_size, verify_certificates = self.download_queue.get()
        try:
            # create the necessary directory structure if they don't exist yet
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            # request the file in streaming mode
            r = requests.get(url, stream=True, verify=verify_certificates)
            with open(filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
            # `assert` is stripped under `python -O`; raise explicitly instead
            actual = os.path.getsize(filename)
            if actual != int(file_size):
                raise ValueError(
                    "size mismatch for {}: expected {}, got {}".format(
                        filename, file_size, actual)
                )
        finally:
            # always tell the queue we are done, otherwise queue.join() hangs
            # forever after any failed download
            self.download_queue.task_done()
Swen Vermeul
committed
def __init__(self, openbis_obj, type, data=None, props=None, **kwargs):
    """Common constructor for openBIS entity wrappers.

    :param openbis_obj: the connection object
    :param type:  the entity type object
    :param data:  raw openBIS response for an existing object (optional)
    :param props: initial property values (optional)
    Extra keyword arguments are assigned as attributes.
    """
    # write through __dict__ to bypass the custom __setattr__
    self.__dict__['openbis'] = openbis_obj
    self.__dict__['type'] = type
    self.__dict__['p'] = PropertyHolder(openbis_obj, type)
    self.__dict__['a'] = AttrHolder(openbis_obj, 'DataSet', type)
    # existing openBIS object
    if data is not None:
        self._set_data(data)
    if props is not None:
        for key in props:
            setattr(self.p, key, props[key])
    # kwargs is always a dict (never None) — iterate it directly
    for key in kwargs:
        setattr(self, key, kwargs[key])
def __eq__(self, other):
    """Equality is defined by comparing string representations."""
    self_repr, other_repr = str(self), str(other)
    return self_repr == other_repr
def __ne__(self, other):
    """Inequality is defined by comparing string representations."""
    self_repr, other_repr = str(self), str(other)
    return self_repr != other_repr
def _set_data(self, data):
    """Attach raw openBIS response data to this object.

    The AttrHolder is invoked (self.a(data), i.e. AttrHolder.__call__) to
    pick up the attributes; any properties are mirrored, unchecked, into
    the p namespace under lower-cased keys.
    """
    self.a(data)
    self.__dict__['data'] = data
    if 'properties' in data:
        for prop_name, prop_value in data['properties'].items():
            self.p.__dict__[prop_name.lower()] = prop_value
Swen Vermeul
committed
@property
def attrs(self):
    """Shortcut to the underlying AttrHolder."""
    holder = self.__dict__['a']
    return holder
return self.openbis.get_project(self._project['identifier'])
except Exception:
pass
return self.openbis.get_experiment(self._experiment['identifier'])
except Exception:
pass
return self.openbis.get_sample(self._sample['identifier'])
except Exception:
pass
def __getattr__(self, name):
    """Delegate unknown attribute lookups to the AttrHolder."""
    holder = self.__dict__['a']
    return getattr(holder, name)
def __setattr__(self, name, value):
    """Forward attribute assignment to the AttrHolder, protecting method names."""
    if name in ('set_properties', 'set_tags', 'add_tags'):
        raise ValueError("These are methods which should not be overwritten")
    holder = self.__dict__['a']
    setattr(holder, name, value)
def _repr_html_(self):
    """Print all the assigned attributes (identifier, tags, etc.) in a nicely formatted table. See
    AttributeHolder class.
    """
    holder = self.__dict__['a']
    return holder._repr_html_()
def __repr__(self):
    """same thing as _repr_html_() but for IPython
    """
    holder = self.__dict__['a']
    return holder.__repr__()
Swen Vermeul
committed
class PhysicalData():
    """Read-only view of the 'physicalData' section of an openBIS dataset.

    Known attributes that are missing from the data resolve to '' so display
    code never has to special-case them.
    """

    def __init__(self, data=None):
        # __getattr__ treats `data` as a mapping, so an empty dict is the
        # correct empty default (the original used [], which behaves the
        # same for membership tests but is the wrong type)
        if data is None:
            data = {}
        self.data = data
        self.attrs = ['speedHint', 'complete', 'shareId', 'size',
                      'fileFormatType', 'storageFormat', 'location', 'presentInArchive',
                      'storageConfirmation', 'locatorType', 'status']

    def __dir__(self):
        return self.attrs

    def __getattr__(self, name):
        # known attributes missing from the data resolve to '';
        # unknown names implicitly return None
        if name in self.attrs:
            if name in self.data:
                return self.data[name]
            else:
                return ''

    def _repr_html_(self):
        """Render the attributes as an HTML table for Jupyter."""
        html = """
            <table border="1" class="dataframe">
            <thead>
                <tr style="text-align: right;">
                <th>attribute</th>
                <th>value</th>
                </tr>
            </thead>
            <tbody>
        """
        for attr in self.attrs:
            html += "<tr> <td>{}</td> <td>{}</td> </tr>".format(
                attr, getattr(self, attr, '')
            )
        html += """
            </tbody>
            </table>
        """
        return html

    def __repr__(self):
        """Render the attributes as a plain-text table (uses third-party tabulate)."""
        headers = ['attribute', 'value']
        lines = []
        for attr in self.attrs:
            lines.append([
                attr,
                getattr(self, attr, '')
            ])
        return tabulate(lines, headers=headers)
class DataSet(OpenBisObject):
    """ DataSet are openBIS objects that contain the actual files.
    """

    def __init__(self, openbis_obj, type=type, data=None, files=None, folder=None, props=None, **kwargs):
        """Wrap an existing dataset (data given) or prepare a new one (files given).

        NOTE(review): the default `type=type` binds the *builtin* `type` —
        looks unintentional but is kept for interface compatibility.
        """
        super(DataSet, self).__init__(openbis_obj, type, data, props, **kwargs)
        # existing DataSet
        if data is not None:
            if data['physicalData'] is None:
                self.__dict__['shareId'] = None
                self.__dict__['location'] = None
            else:
                self.__dict__['shareId'] = data['physicalData']['shareId']
                self.__dict__['location'] = data['physicalData']['location']
        # new DataSet
        if files is not None:
            self.__dict__['files'] = files
            self.__dict__['folder'] = folder
def __str__(self):
    """A DataSet is represented by its code."""
    code = self.data['code']
    return code
def __dir__(self):
    """Attributes and methods offered for tab-completion.

    NOTE(review): the `return [` wrapper was lost in the garbled paste and
    has been restored around the surviving list entries.
    """
    return [
        'props', 'get_parents()', 'get_children()',
        'add_parents()', 'add_children()', 'del_parents()', 'del_children()',
        'sample', 'experiment', 'physicalData',
        'tags', 'set_tags()', 'add_tags()', 'del_tags()',
        'add_attachment()', 'get_attachments()', 'download_attachments()',
        "get_files(start_folder='/')", 'file_list',
        'download(files=None, destination=None, wait_until_finished=True)',
        'status', 'archive()', 'unarchive()', 'data'
    ]
def __setattr__(self, name, value):
    """'folder' is stored directly on the instance; everything else goes
    through the parent class (and thus the AttrHolder)."""
    if name == 'folder':
        self.__dict__[name] = value
    else:
        super(DataSet, self).__setattr__(name, value)
@property
def props(self):
    """The PropertyHolder of this dataset."""
    holder = self.__dict__['p']
    return holder
@property
def type(self):
    """The dataset type object."""
    current_type = self.__dict__['type']
    return current_type
@type.setter
def type(self, type_name):
    """Resolve the dataset type in openBIS (case-insensitively) and
    propagate it to both the property and attribute holders."""
    dataset_type = self.openbis.get_dataset_type(type_name.upper())
    self.p.__dict__['_type'] = dataset_type
    self.a.__dict__['_type'] = dataset_type
@property
def physicalData(self):
    """Return a PhysicalData view of this dataset, or None (implicitly)
    when the raw data carries no 'physicalData' entry."""
    if 'physicalData' in self.data:
        return PhysicalData(self.data['physicalData'])
@property
def status(self):
    """Fetch the (possibly updated) archiving status from openBIS.

    Refreshes the cached physicalData as a side effect; returns None when
    no status is available.
    """
    fresh = self.openbis.get_dataset(self.permId)
    self.data['physicalData'] = fresh.data['physicalData']
    try:
        return self.data['physicalData']['status']
    except Exception:
        return None
def archive(self, remove_from_data_store=True):
    """Archive this dataset, optionally removing it from the data store."""
    options = {
        "removeFromDataStore": remove_from_data_store,
        "@type": "as.dto.dataset.archive.DataSetArchiveOptions",
    }
    self.archive_unarchive('archiveDataSets', options)
    print("DataSet {} archived".format(self.permId))
def unarchive(self):
    """Bring this dataset back from the archive."""
    options = {
        "@type": "as.dto.dataset.unarchive.DataSetUnarchiveOptions",
    }
    self.archive_unarchive('unarchiveDataSets', options)
    print("DataSet {} unarchived".format(self.permId))
def archive_unarchive(self, method, fetchopts):
    """Issue an (un)archive request for this dataset via the V3 API.

    :param method: 'archiveDataSets' or 'unarchiveDataSets'
    :param fetchopts: the DataSet(Un)ArchiveOptions dict
    """
    request = {
        "method": method,
        "params": [
            self.openbis.token,
            [{
                "permId": self.permId,
                "@type": "as.dto.dataset.id.DataSetPermId"
            }],
            dict(fetchopts)
        ],
    }
    # bug fix: the connection is stored as 'openbis' on this object,
    # not '_openbis' (that name lives on AttrHolder); also dropped the
    # unused `dss`/`payload` locals
    resp = self.openbis._post_request(self.openbis.as_v3, request)
    return
def set_properties(self, properties):
    """Update this dataset's properties in openBIS."""
    self.openbis.update_dataset(self.permId, properties=properties)
Swen Vermeul
committed
def download(self, files=None, destination=None, wait_until_finished=True, workers=10):
    """ download the actual files and put them by default in the following folder:
    __current_dir__/destination/dataset_permId/
    If no files are specified, all files of a given dataset are downloaded.
    If no destination is specified, the hostname is chosen instead.
    Files are usually downloaded in parallel, using 10 workers by default. If you want to wait until
    all the files are downloaded, set the wait_until_finished option to True.
    """
    if files is None:
        # no files given: download every file of the dataset.
        # NOTE(review): this branch's body was lost in the garbled paste;
        # restored to use file_list() — confirm against upstream.
        files = self.file_list()
    elif isinstance(files, str):
        files = [files]
    if destination is None:
        destination = self.openbis.hostname
    base_url = self.data['dataStore']['downloadUrl'] + '/datastore_server/' + self.permId + '/'
    queue = DataSetDownloadQueue(workers=workers)
    # get file list and start download
    for filename in files:
        file_info = self.get_file_list(start_folder=filename)
        file_size = file_info[0]['fileSize']
        download_url = base_url + filename + '?sessionID=' + self.openbis.token
        filename_dest = os.path.join(destination, self.permId, filename)
        queue.put([download_url, filename_dest, file_size, self.openbis.verify_certificates])
    # wait until all files have downloaded
    if wait_until_finished:
        queue.join()
    print("Files downloaded to: %s" % os.path.join(destination, self.permId))
@property
def folder(self):
    """The workspace folder this (new) dataset is registered in."""
    current_folder = self.__dict__['folder']
    return current_folder
def file_list(self):
    """returns the list of files including their directories as an array of strings. Just folders are not
    listed.
    """
    return [
        entry['pathInDataSet']
        for entry in self.get_file_list(recursive=True)
        if not entry['isDirectory']
    ]
def get_files(self, start_folder='/'):
    """Returns a DataFrame of all files in this dataset
    """

    def createRelativePath(pathInDataSet):
        # no shareId means the dataset is not physically stored here
        if self.shareId is None:
            return ''
        return os.path.join(self.shareId, self.location, pathInDataSet)

    def signed_to_unsigned(sig_int):
        """openBIS delivers crc32 checksums as signed integers.
        If the number is negative, we just have to add 2**32
        We display the hex number to match with the classic UI
        """
        if sig_int < 0:
            sig_int += 2 ** 32
        return "%x" % (sig_int & 0xFFFFFFFF)

    file_entries = self.get_file_list(start_folder=start_folder)
    df = DataFrame(file_entries)
    df['relativePath'] = df['pathInDataSet'].map(createRelativePath)
    df['crc32Checksum'] = df['crc32Checksum'].fillna(0.0).astype(int).map(signed_to_unsigned)
    return df[['isDirectory', 'pathInDataSet', 'fileSize', 'crc32Checksum']]
def get_file_list(self, recursive=True, start_folder="/"):
    """Lists all files of a given dataset. You can specifiy a start_folder other than "/".
    By default, all directories and their containing files are listed recursively. You can
    turn off this option by setting recursive=False.

    NOTE(review): the request dict and the HTTP call were truncated in the
    garbled paste; they are reconstructed from the surviving fragments
    (method name, permId param, '"id": "1"', the rmi-dss-api-v1.json URL and
    the error handling) — verify against upstream pybis.
    """
    request = {
        "method": "listFilesForDataSet",
        "params": [
            self.openbis.token,
            self.permId,
            start_folder,
            recursive,
        ],
        "id": "1"
    }
    resp = requests.post(
        self.data["dataStore"]["downloadUrl"] + '/datastore_server/rmi-dss-api-v1.json',
        json.dumps(request),
        verify=self.openbis.verify_certificates
    )
    if resp.ok:
        data = resp.json()
        if 'error' in data:
            raise ValueError('Error from openBIS: ' + data['error']['message'])
        elif 'result' in data:
            return data['result']
        else:
            raise ValueError('request to openBIS did not return either result nor error')
    else:
        raise ValueError('internal error while performing post request')
def _generate_plugin_request(self, dss):
    """generates a request to activate the dataset-uploader ingestion plugin to
    register our files as a new dataset

    :param dss: the datastore code the plugin runs on
    :returns: the request dict (bug fix: the original never returned it)
    """
    sample_identifier = None
    if self.sample is not None:
        sample_identifier = self.sample.identifier
    experiment_identifier = None
    if self.experiment is not None:
        experiment_identifier = self.experiment.identifier
    parentIds = self.parents
    dataset_type = self.type.code
    metadata = self.props.all_nonempty()
    request = {
        "method": "createReportFromAggregationService",
        "params": [
            self.openbis.token,
            dss,
            PYBIS_PLUGIN,
            {
                "sampleId": sample_identifier,
                "experimentId": experiment_identifier,
                "dataSets": [{
                    "dataSetType": dataset_type,
                    "folder": self.folder,
                    "sessionWorkspaceFolder": "",
                    "fileNames": self.files,
                    "properties": metadata,
                    "parentIds": parentIds
                }]
            }
        ],
    }
    return request
def save(self):
    """Register this dataset in openBIS (when new) or update it (when existing).

    New datasets are uploaded to the session workspace first and then
    registered via the dataset-uploader ingestion plugin.
    :raises ValueError: when a new dataset has no files or no sample/experiment,
                        or when registration fails server-side.
    """
    if self.is_new:
        if self.files is None or len(self.files) == 0:
            raise ValueError('Cannot register a dataset without a file. Please provide at least one file')
        if self.sample is None and self.experiment is None:
            raise ValueError('A DataSet must be either connected to a Sample or an Experiment')
        # upload the data to the user session workspace
        datastores = self.openbis.get_datastores()
        self.openbis.upload_files(
            datastore_url=datastores['downloadUrl'][0],
            files=self.files,
            folder='',
            wait_until_finished=True
        )
        # activate the ingestion plugin, as soon as the data is uploaded
        request = self._generate_plugin_request(dss=datastores['code'][0])
        resp = self.openbis._post_request(self.openbis.reg_v1, request)
        if resp['rows'][0][0]['value'] == 'OK':
            permId = resp['rows'][0][2]['value']
            if permId is None or permId == '':
                self.__dict__['is_new'] = False
                print("DataSet successfully created. Because you connected to an openBIS version older than 16.05.04, you cannot update the object.")
            else:
                new_dataset_data = self.openbis.get_dataset(permId, only_data=True)
                self._set_data(new_dataset_data)
                print("DataSet successfully created.")
        else:
            raise ValueError('Error while creating the DataSet: ' + resp['rows'][0][1]['value'])
        # NOTE(review): likely ineffective — is_new resolves through the
        # AttrHolder, not this dict entry; kept from the original, confirm
        self.__dict__['_is_new'] = False
    else:
        request = self._up_attrs()
        props = self.p._all_props()
        request["params"][1][0]["properties"] = props
        request["params"][1][0].pop('parentIds')
        request["params"][1][0].pop('childIds')
        self.openbis._post_request(self.openbis.as_v3, request)
        print("DataSet successfully updated.")
class AttrHolder():
""" General class for both samples and experiments that hold all common attributes, such as:
- space
Swen Vermeul
committed
- experiment (sample)
- samples (experiment)
Swen Vermeul
committed
- parents (sample, dataset)
- children (sample, dataset)
- tags
"""
def __init__(self, openbis_obj, entity, type=None):
    """Initialise the holder for a given entity kind ('Sample', 'DataSet', ...).

    NOTE(review): the original `if type is not None:` guarded the
    _allowed_attrs assignment, leaving `type` unused; the `_type`
    assignment (referenced by _new_attrs and __setattr__) was evidently
    lost in the paste and has been restored — verify upstream.
    """
    self.__dict__['_openbis'] = openbis_obj
    self.__dict__['_entity'] = entity
    if type is not None:
        self.__dict__['_type'] = type
    self.__dict__['_allowed_attrs'] = _definitions(entity)['attrs']
    self.__dict__['_identifier'] = None
    self.__dict__['_is_new'] = True
def __call__(self, data):
    """This internal method is invoked when an existing object is loaded.
    Instead of invoking a special method we «call» the object with the data
       self(data)
    which automatically invokes this method.
    Since the data comes from openBIS, we do not have to check it (hence the
    self.__dict__ statements to prevent invoking the __setattr__ method)
    Internally data is stored with an underscore, e.g.
        sample._space --> { '@id': 4,
                            '@type': 'as.dto.space.id.SpacePermId',
                            'permId': 'MATERIALS' }
    but when fetching the attribute without the underscore, we only return
    the relevant data for the user:
        sample.space  --> 'MATERIALS'

    NOTE(review): the "space" and "tags" branch headers were lost in the
    garbled blame-view paste; they are reconstructed from the surviving
    bodies — verify against upstream pybis.
    """
    self.__dict__['_is_new'] = False
    for attr in self._allowed_attrs:
        if attr in ["code", "permId", "identifier",
                    "type", "container", "components"]:
            self.__dict__['_' + attr] = data.get(attr, None)
        elif attr in ["space"]:
            d = data.get(attr, None)
            if d is not None:
                d = d['permId']
            self.__dict__['_' + attr] = d
        elif attr in ["sample", "experiment", "project"]:
            d = data.get(attr, None)
            if d is not None:
                d = d['identifier']
            self.__dict__['_' + attr] = d
        elif attr in ["parents", "children", "samples"]:
            self.__dict__['_' + attr] = []
            for item in data[attr]:
                # the blame view showed both an 'identifier' and a 'permId'
                # version of this append; prefer whichever key exists
                if 'identifier' in item:
                    self.__dict__['_' + attr].append(item['identifier'])
                else:
                    self.__dict__['_' + attr].append(item['permId'])
        elif attr in ["tags"]:
            tags = []
            for item in data[attr]:
                tags.append({
                    "code": item['code'],
                    "@type": "as.dto.tag.id.TagCode"
                })
            self.__dict__['_tags'] = tags
            self.__dict__['_prev_tags'] = copy.deepcopy(tags)
        else:
            self.__dict__['_' + attr] = data.get(attr, None)
"""Returns the Python-equivalent JSON request when a new object is created.
It is used internally by the save() method of a newly created object.
"""
defs = _definitions(self.entity)
attr2ids = _definitions('attr2ids')
new_obj = {
"@type": "as.dto.{}.create.{}Creation".format(self.entity.lower(), self.entity)
}
for attr in defs['attrs_new']:
items = None
if attr == 'type':
new_obj['typeId'] = self._type['permId']
continue
elif attr == 'attachments':
attachments = getattr(self, '_new_attachments')
if attachments is None:
continue
atts_data = [attachment.get_data() for attachment in attachments]
items = atts_data
elif attr in defs['multi']:
# parents, children, components, container, tags, attachments
items = getattr(self, '_' + attr)
if items is None:
items = []
else:
items = getattr(self, '_' + attr)
key = None
if attr in attr2ids:
# translate parents into parentIds, children into childIds etc.
key = attr2ids[attr]
else:
key = attr
new_obj[key] = items
request = {
"method": "create{}s".format(self.entity),
"params": [
self.openbis.token,
[new_obj]
]
}
return request
def _up_attrs(self):
    """Returns the Python-equivalent JSON request when a new object is updated.
    It is used internally by the save() method of an object to be updated.

    NOTE(review): the `def` header, the `up_obj` opener and several dict
    openers were lost in the garbled paste; they are reconstructed from
    the surviving fragments — verify against upstream pybis.
    """
    defs = _definitions(self.entity)
    attr2ids = _definitions('attr2ids')
    up_obj = {
        "@type": "as.dto.{}.update.{}Update".format(self.entity.lower(), self.entity),
        defs["identifier"]: self._permId
    }
    # look at all attributes available for that entity
    # that can be updated
    for attr in defs['attrs_up']:
        items = None
        if attr == 'attachments':
            # v3 API currently only supports adding attachments
            attachments = self.__dict__.get('_new_attachments', None)
            if attachments is None:
                continue
            atts_data = [attachment.get_data() for attachment in attachments]
            up_obj['attachments'] = {
                "actions": [{
                    "items": atts_data,
                    "@type": "as.dto.common.update.ListUpdateActionAdd"
                }],
                "@type": "as.dto.attachment.update.AttachmentListUpdateValue"
            }
        elif attr == 'tags':
            # look which tags have been added or removed and update them
            if getattr(self, '_prev_tags') is None:
                self.__dict__['_prev_tags'] = []
            actions = []
            for tagId in self._prev_tags:
                if tagId not in self._tags:
                    actions.append({
                        "items": [tagId],
                        "@type": "as.dto.common.update.ListUpdateActionRemove"
                    })
            for tagId in self._tags:
                if tagId not in self._prev_tags:
                    actions.append({
                        "items": [tagId],
                        "@type": "as.dto.common.update.ListUpdateActionAdd"
                    })
            up_obj[attr2ids['tags']] = {
                "@type": "as.dto.common.update.IdListUpdateValue",
                "actions": actions
            }
        elif '_' + attr in self.__dict__:
            # handle multivalue attributes (parents, children, tags etc.)
            # we only cover the Set mechanism, which means we always update
            # all items in a list
            if attr in defs['multi']:
                items = self.__dict__.get('_' + attr, [])
                if items is None:
                    items = []
                up_obj[attr2ids[attr]] = {
                    "actions": [
                        {
                            "items": items,
                            "@type": "as.dto.common.update.ListUpdateActionSet",
                        }
                    ],
                    "@type": "as.dto.common.update.IdListUpdateValue"
                }
            else:
                # handle single attributes (space, experiment, project, container, etc.)
                value = self.__dict__.get('_' + attr, {})
                if value is None:
                    continue
                elif len(value) == 0:
                    # value is {}: it means that we want this attribute to be
                    # deleted, not updated.
                    up_obj[attr2ids[attr]] = {
                        "@type": "as.dto.common.update.FieldUpdateValue",
                        "isModified": True,
                    }
                elif 'isModified' in value and value['isModified'] == True:
                    val = {}
                    for x in ['identifier', 'permId', '@type']:
                        if x in value:
                            val[x] = value[x]
                    up_obj[attr2ids[attr]] = {
                        "@type": "as.dto.common.update.FieldUpdateValue",
                        "isModified": True,
                        "value": val
                    }
    request = {
        "method": "update{}s".format(self.entity),
        "params": [
            self.openbis.token,
            [up_obj]
        ]
    }
    return request
def __getattr__(self, name):
    """ handles all attribute requests dynamically. Values are returned in a sensible way,
    for example the identifiers of parents, children and components are returned
    as an array of values.
    """
    int_name = '_' + name
    if int_name in self.__dict__:
        if int_name in ['_attachments']:
            # bug fix: the opening '{' of the attachment dict was missing
            return [
                {
                    "fileName": x['fileName'],
                    "title": x['title'],
                    "description": x['description']
                } for x in self._attachments
            ]
        if int_name in ['_registrator', '_modifier', '_dataProducer']:
            return self.__dict__[int_name].get('userId', None)
        elif int_name in ['_registrationDate', '_modificationDate', '_accessDate', '_dataProductionDate']:
            return format_timestamp(self.__dict__[int_name])
        # if the attribute contains a list,
        # return a list of either identifiers, codes or
        # permIds (whatever is available first)
        elif isinstance(self.__dict__[int_name], list):
            values = []
            for item in self.__dict__[int_name]:
                if "identifier" in item:
                    values.append(item['identifier'])
                elif "code" in item:
                    values.append(item['code'])
                elif "permId" in item:
                    values.append(item['permId'])
                else:
                    pass
            return values
        # attribute contains a dictionary: same procedure as above.
        elif isinstance(self.__dict__[int_name], dict):
            if "identifier" in self.__dict__[int_name]:
                return self.__dict__[int_name]['identifier']
            elif "code" in self.__dict__[int_name]:
                return self.__dict__[int_name]['code']
            elif "permId" in self.__dict__[int_name]:
                return self.__dict__[int_name]['permId']
            else:
                return self.__dict__[int_name]
        else:
            return self.__dict__[int_name]
    else:
        return None
def __setattr__(self, name, value):
    """This method is always invoked whenever we assign an attribute to an
    object, e.g.
        new_sample.space = 'MATERIALS'
        new_sample.parents = ['/MATERIALS/YEAST747']

    NOTE(review): the '@id' removal, the space-branch is_new guard and the
    '_code' assignment were lost in the garbled paste and reconstructed —
    verify against upstream pybis.
    """
    if name in ["parents", "children", "components"]:
        if not isinstance(value, list):
            value = [value]
        objs = []
        for val in value:
            if isinstance(val, str):
                # fetch objects in openBIS, make sure they actually exist
                obj = getattr(self._openbis, 'get_' + self._entity.lower())(val)
                objs.append(obj)
            elif getattr(val, '_permId'):
                # we got an existing object
                objs.append(val)
        permids = []
        for item in objs:
            permid = item._permId
            # remove any existing @id keys to prevent jackson parser errors
            permid.pop('@id', None)
            permids.append(permid)
        # setting self._parents = [{
        #    '@type': 'as.dto.sample.id.SampleIdentifier',
        #    'identifier': '/SPACE_NAME/SAMPLE_NAME'
        # }]
        self.__dict__['_' + name] = permids
    elif name in ["tags"]:
        self.set_tags(value)
    elif name in ["attachments"]:
        if isinstance(value, list):
            for item in value:
                if isinstance(item, dict):
                    self.add_attachment(**item)
                else:
                    self.add_attachment(item)
        else:
            self.add_attachment(value)
    elif name in ["space"]:
        # fetch object in openBIS, make sure it actually exists
        obj = getattr(self._openbis, "get_" + name)(value)
        self.__dict__['_' + name] = obj.data['permId']
        # mark attribute as modified, if it's an existing entity
        if self.is_new:
            pass
        else:
            self.__dict__['_' + name]['isModified'] = True
    elif name in ["sample", "experiment", "project"]:
        obj = None
        if isinstance(value, str):
            # fetch object in openBIS, make sure it actually exists
            obj = getattr(self._openbis, "get_" + name)(value)
        elif value is None:
            self.__dict__['_' + name] = {}
            return
        else:
            obj = value
        self.__dict__['_' + name] = obj.data['identifier']
        # mark attribute as modified, if it's an existing entity
        if self.is_new:
            pass
        else:
            self.__dict__['_' + name]['isModified'] = True
    elif name in ["identifier"]:
        raise KeyError("you can not modify the {}".format(name))
    elif name == "code":
        try:
            if self._type.data['autoGeneratedCode']:
                raise KeyError("for this {}Type you can not set a code".format(self.entity))
        except AttributeError:
            pass
        self.__dict__['_code'] = value
    elif name == "description":
        self.__dict__['_description'] = value
    else:
        raise KeyError("no such attribute: {}".format(name))
def get_type(self):
    """Return the entity type assigned to this holder.

    NOTE(review): the body was lost in the garbled paste; restored to
    return self._type (the only type storage visible) — verify upstream.
    """
    return self._type
def _ident_for_whatever(self, whatever):
if isinstance(whatever, str):
# fetch parent in openBIS, we are given an identifier
obj = getattr(self._openbis, 'get_'+self._entity.lower())(whatever)
else:
# we assume we got an object
obj = whatever
ident = None