Skip to content
Snippets Groups Projects
Commit 8a11d561 authored by Swen Vermeul's avatar Swen Vermeul
Browse files

added controlled vocabulary when datasettypes are fetched. added experiment...

added controlled vocabulary when datasettypes are fetched. added experiment and properties to dataset upload. added some debug information on server side.
parent edcf49af
No related branches found
No related tags found
No related merge requests found
......@@ -2,7 +2,6 @@ from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
from pybis import Openbis
import numpy as np
import os
from urllib.parse import unquote
import yaml
......@@ -31,8 +30,8 @@ def _load_configuration(paths, filename='openbis-connections.yaml'):
config = yaml.safe_load(stream)
for connection in config['connections']:
connections.append(connection)
except yaml.YAMLexception as exc:
print(exc)
except yaml.YAMLexception as e:
print(e)
return None
return connections
......@@ -473,9 +472,14 @@ class DataSetTypesHandler(IPythonHandler):
# and add it to the dataset collection
for dt in dts:
dataset_type = conn.openbis.get_dataset_type(dt['code'])
pa = dataset_type.get_propertyAssignments()
pa_dict = pa.to_dict(orient='records')
dt['propertyAssignments'] = pa_dict
pa = dataset_type.get_propertyAssignments(including_vocabulary=True)
pa_dicts = pa.to_dict(orient='records')
for pa_dict in pa_dicts:
if pa_dict['dataType'] == 'CONTROLLEDVOCABULARY':
terms = conn.openbis.get_terms(pa_dict['vocabulary']['code'])
pa_dict['terms'] = terms.df[['code','label','description','official','ordinal']].to_dict(orient='records')
dt['propertyAssignments'] = pa_dicts
self.write({
"dataSetTypes": dts
......@@ -483,6 +487,7 @@ class DataSetTypesHandler(IPythonHandler):
return
except Exception as e:
print(e)
self.set_status(500)
self.write({
"reason":'Could not fetch dataset-types: {}'.format(e)
......@@ -491,60 +496,112 @@ class DataSetTypesHandler(IPythonHandler):
class DataSetUploadHandler(IPythonHandler):
"""Handle the requests for /openbis/dataset/connection"""
"""Handle the POST requests for /openbis/dataset/connection_name"""
def upload_data(self, conn, data):
    """Create and save a new openBIS dataset from the request payload.

    Parameters:
        conn: connection wrapper exposing .login(), .is_session_active(),
              .name and the underlying .openbis client.
        data: parsed JSON body; expected keys: sampleIdentifier,
              experimentIdentifier, type, files, props (all optional
              except that a sampleIdentifier is required).

    Side effects: writes a JSON response via self.write() and sets an
    HTTP 500 status on any validation or upload error. Returns None.
    """
    # Ensure we have a live openBIS session before doing anything else.
    if not conn.is_session_active():
        try:
            conn.login()
        except Exception as e:
            print(e)
            self.set_status(500)
            self.write({
                "reason": 'connection to {} could not be established: {}'.format(conn.name, e)
            })
            return

    # Collect all validation problems so the client gets them in one response.
    errors = []

    # Resolve the (required) sample and the (optional) experiment.
    sample = None
    experiment = None
    if data.get('sampleIdentifier'):
        try:
            sample = conn.openbis.get_sample(data.get('sampleIdentifier'))
        except Exception as e:
            print(e)
            errors.append(
                {"sampleIdentifier": 'No such sample: {}'.format(data.get('sampleIdentifier'))}
            )
    else:
        errors.append(
            {"sampleIdentifier": "please provide a sample identifier"}
        )

    if data.get('experimentIdentifier'):
        try:
            experiment = conn.openbis.get_experiment(data.get('experimentIdentifier'))
        except Exception as e:
            print(e)
            errors.append(
                {"experimentIdentifier": 'No such experiment: {}'.format(data.get('experimentIdentifier'))}
            )

    # Filenames arrive URL-encoded; only accept files that exist on disk.
    filenames = []
    for filename in data.get('files'):
        filename = unquote(filename)
        if os.path.isfile(filename):
            filenames.append(filename)
        else:
            errors.append({
                "file": "File not found: {}".format(filename)
            })

    dataset = None
    try:
        dataset = conn.openbis.new_dataset(
            type=data.get('type'),
            sample=sample,
            experiment=experiment,
            files=filenames,
        )
    except Exception as e:
        print(e)
        errors.append({
            "create": 'Error while creating the dataset: {}'.format(e)
        })

    # Abort early if anything failed so far; nothing was persisted yet.
    if errors:
        self.set_status(500)
        self.write({"errors": errors})
        return

    # Apply any user-supplied properties to the new dataset.
    # FIX: was data.et('props') — an AttributeError typo for data.get('props').
    if data.get('props'):
        props = data.get('props')
        for prop, value in props.items():
            try:
                setattr(dataset.props, prop, value)
            except Exception as e:
                # FIX: stringify the exception — a raw Exception object is
                # not JSON-serializable by self.write().
                errors.append({
                    "prop." + prop: str(e)
                })

    try:
        dataset.save()
    except Exception as e:
        errors.append({
            "save": 'Error while saving the dataset: {}'.format(e)
        })

    # Report any late errors, otherwise confirm success.
    if errors:
        self.set_status(500)
        self.write({"errors": errors})
    else:
        self.write({
            'status': 200,
            'statusText': 'Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId)
        })
def post(self, **params):
"""Handle a request to /openbis/dataset/connection_name/permId
......@@ -560,4 +617,6 @@ class DataSetUploadHandler(IPythonHandler):
return
data = self.get_json_body()
results = self.upload_data(conn=conn,data=data)
print("Received DATA")
print(data)
self.upload_data(conn=conn,data=data)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment