Commit a73819b5 authored by Swen Vermeul

implemented dataSet CONTAINERS

parent b1d087d7
@@ -17,7 +17,31 @@ class DataSet(OpenBisObject):
     """ DataSet are openBIS objects that contain the actual files.
     """

-    def __init__(self, openbis_obj, type=type, data=None, files=None, folder=None, props=None, **kwargs):
+    def __init__(self, openbis_obj, type=type, data=None, files=None, folder=None, kind=None, props=None, **kwargs):
+
+        if kwargs is None:
+            kwargs = {}
+        if kwargs.get('code'):
+            kwargs["autoGeneratedCode"] = False
+        else:
+            kwargs["autoGeneratedCode"] = True
+
+        if kind == 'PHYSICAL_DATA':
+            if files is None:
+                raise ValueError('please provide at least one file')
+            if isinstance(files, str):
+                files = [files]
+            for file in files:
+                if not os.path.exists(file):
+                    raise ValueError('File {} does not exist'.format(file))
+            self.__dict__['files'] = files
+
+        # initialize the attributes
         super(DataSet, self).__init__(openbis_obj, type, data, props, **kwargs)

         self.__dict__['files_in_wsp'] = []
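For reference, the file handling added to the constructor boils down to the following standalone sketch (plain Python, not part of the pybis API): a single filename is wrapped into a list, and every file must exist on disk before the object is created.

import os

# Standalone illustration of the file check added above (not pybis code):
# a single filename is accepted and wrapped into a list, and every file
# must exist locally, otherwise a ValueError is raised.
def check_files(files):
    if isinstance(files, str):
        files = [files]
    for file in files:
        if not os.path.exists(file):
            raise ValueError('File {} does not exist'.format(file))
    return files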
@@ -31,9 +55,15 @@ class DataSet(OpenBisObject):
                 self.__dict__['shareId'] = data['physicalData']['shareId']
                 self.__dict__['location'] = data['physicalData']['location']

         # new DataSet
-        if files is not None:
-            self.__dict__['files'] = files
+        if kind is not None:
+            kind = kind.upper()
+            allowed_kinds = ['PHYSICAL_DATA', 'CONTAINER', 'LINK']
+            if kind not in allowed_kinds:
+                raise ValueError(
+                    "only these values are allowed for kind: {}".format(allowed_kinds)
+                )
+            self.a.__dict__['_kind'] = kind

         self.__dict__['folder'] = folder
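A minimal usage sketch of the new kind argument, assuming an existing pybis connection o whose new_dataset() forwards its keyword arguments to the constructor shown above; the dataset type code and experiment identifier are placeholders, not part of this commit.

# kind is normalised with kind.upper(), so lower case is accepted
container = o.new_dataset(
    type='ANALYSIS_RESULTS',                        # placeholder type code
    experiment='/MY_SPACE/MY_PROJECT/MY_EXPERIMENT',  # placeholder identifier
    kind='container',
)

# anything outside the allowed list is rejected in __init__
o.new_dataset(type='ANALYSIS_RESULTS', kind='bundle')
# -> ValueError: only these values are allowed for kind:
#    ['PHYSICAL_DATA', 'CONTAINER', 'LINK']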
@@ -391,41 +421,67 @@ class DataSet(OpenBisObject):
     def save(self):
         if self.is_new:
-            if self.files is None or len(self.files) == 0:
-                raise ValueError('Cannot register a dataset without a file. Please provide at least one file')
+            datastores = self.openbis.get_datastores()
+            permId = None
             if self.sample is None and self.experiment is None:
                 raise ValueError('A DataSet must be either connected to a Sample or an Experiment')

-            # upload the data to the user session workspace
-            datastores = self.openbis.get_datastores()
-
-            self.upload_files(
-                datastore_url= datastores['downloadUrl'][0],
-                files=self.files,
-                folder='',
-                wait_until_finished=True
-            )
-
-            # activate the ingestion plugin, as soon as the data is uploaded
-            request = self._generate_plugin_request(dss=datastores['code'][0])
-
-            resp = self.openbis._post_request(self.openbis.reg_v1, request)
-
-            if resp['rows'][0][0]['value'] == 'OK':
-                permId = resp['rows'][0][2]['value']
-                if permId is None or permId == '':
-                    self.__dict__['is_new'] = False
-                    if VERBOSE: print("DataSet successfully created. Because you connected to an openBIS version older than 16.05.04, you cannot update the object.")
-                else:
-                    new_dataset_data = self.openbis.get_dataset(permId, only_data=True)
-                    self._set_data(new_dataset_data)
-                    if VERBOSE: print("DataSet successfully created.")
-            else:
-                raise ValueError('Error while creating the DataSet: ' + resp['rows'][0][1]['value'])
+            if self.kind == 'PHYSICAL_DATA':
+                if self.files is None or len(self.files) == 0:
+                    raise ValueError(
+                        'Cannot register a dataset without a file. Please provide at least one file'
+                    )
+
+                # for uploading physical data, we first upload it to the session workspace
+                self.upload_files(
+                    datastore_url= datastores['downloadUrl'][0],
+                    files=self.files,
+                    folder='',
+                    wait_until_finished=True
+                )
+
+                # activate the ingestion plugin, as soon as the data is uploaded
+                # this will actually register the dataset in the datastore and the AS
+                request = self._generate_plugin_request(dss=datastores['code'][0])
+                resp = self.openbis._post_request(self.openbis.reg_v1, request)
+                if resp['rows'][0][0]['value'] == 'OK':
+                    permId = resp['rows'][0][2]['value']
+                    if permId is None or permId == '':
+                        self.__dict__['is_new'] = False
+                        if VERBOSE: print("DataSet successfully created. Because you connected to an openBIS version older than 16.05.04, you cannot update the object.")
+                    else:
+                        new_dataset_data = self.openbis.get_dataset(permId, only_data=True)
+                        self._set_data(new_dataset_data)
+                        if VERBOSE: print("DataSet successfully created.")
+                    return self
+                else:
+                    raise ValueError('Error while creating the DataSet: ' + resp['rows'][0][1]['value'])
+
+            # CONTAINER
+            else:
+                if self.files is not None and len(self.files) > 0:
+                    raise ValueError(
+                        'DataSets of kind CONTAINER or LINK cannot contain data'
+                    )
+
+                request = self._new_attrs()
+                props = self.p._all_props()
+
+                DSpermId = datastores['code'][0]
+                request["params"][1][0]["properties"] = props
+                request["params"][1][0]["dataStoreId"] = {
+                    "permId": DSpermId,
+                    "@type": "as.dto.datastore.id.DataStorePermId"
+                }
+
+                resp = self.openbis._post_request(self.openbis.as_v3, request)
+
+                if VERBOSE: print("DataSet successfully created.")
+                new_dataset_data = self.openbis.get_dataset(resp[0]['permId'], only_data=True)
+                self._set_data(new_dataset_data)
+                return self
+
+        # updating the DataSet
         else:
             request = self._up_attrs()
             props = self.p._all_props()
...
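To illustrate the two registration branches above, a hedged usage sketch, reusing the placeholder connection o, type codes, experiment identifier and file name from the earlier examples: a PHYSICAL_DATA dataset needs at least one file and goes through the session-workspace upload plus the ingestion plugin (reg_v1), while a CONTAINER must not carry files and is registered directly against the v3 API (as_v3) with the dataStoreId of the first datastore.

# PHYSICAL_DATA: file(s) are uploaded to the session workspace, then the
# ingestion plugin registers the dataset in the datastore and the AS.
physical = o.new_dataset(
    type='RAW_DATA',                                  # placeholder type code
    experiment='/MY_SPACE/MY_PROJECT/MY_EXPERIMENT',  # placeholder identifier
    kind='PHYSICAL_DATA',
    files=['data/measurement.txt'],                   # must exist locally (checked in __init__)
)
physical.save()

# CONTAINER: no files allowed; registered directly via the v3 API.
container = o.new_dataset(
    type='ANALYSIS_RESULTS',
    experiment='/MY_SPACE/MY_PROJECT/MY_EXPERIMENT',
    kind='CONTAINER',
)
container.save()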