Skip to content
Snippets Groups Projects
Commit 8af09bb9 authored by Swen Vermeul's avatar Swen Vermeul
Browse files

added all jupyter-openbis-server files

parent 270bed7a
No related branches found
No related tags found
No related merge requests found
# Package metadata for the jupyter-openbis-server server extension.
name = 'jupyter-openbis-server.server'
__author__ = 'Swen Vermeul'
__email__ = 'swen@ethz.ch'
__version__ = '0.1.0'
def _jupyter_server_extension_paths():
return [{
"module": "jupyter-openbis-server.main"
}]
def load_jupyter_server_extension(nbapp):
    """Announce on the notebook server's log that this module is enabled."""
    message = "jupyter-openbis-server module enabled!"
    nbapp.log.info(message)
# stdlib
import os
# third-party: the openBIS client and the notebook server's base handler
from pybis import Openbis
from notebook.base.handlers import IPythonHandler
# Global registry of all configured openBIS connections, keyed by
# connection name; shared by all handler classes in this module.
openbis_connections = {}
def register_connection(connection_info):
    """Build an OpenBISConnection from a settings dict and register it.

    The connection is stored in the module-level ``openbis_connections``
    registry under its name and then returned to the caller.
    """
    settings = {
        'name': connection_info.get('name'),
        'url': connection_info.get('url'),
        'verify_certificates': connection_info.get('verify_certificates', False),
        'username': connection_info.get('username'),
        'password': connection_info.get('password'),
        'http_only': connection_info.get('http_only', False),
        'status': 'not connected',
    }
    connection = OpenBISConnection(**settings)
    openbis_connections[connection.name] = connection
    return connection
class OpenBISConnection:
    """Hold the state of one openBIS connection (URL, credentials, session).

    Required keyword arguments: ``name`` and ``url``.  The optional ones
    (``username``, ``password``, ``verify_certificates``, ``http_only``)
    now receive defaults, so a connection can be built from a minimal dict
    without raising AttributeError later.
    """

    def __init__(self, **kwargs):
        for needed_key in ['name', 'url']:
            if needed_key not in kwargs:
                raise KeyError("{} is missing".format(needed_key))
        # bug fix: these attributes are read below (and by login()),
        # but were previously only set when the caller passed them
        kwargs.setdefault('verify_certificates', False)
        kwargs.setdefault('http_only', False)
        kwargs.setdefault('username', None)
        kwargs.setdefault('password', None)
        for key in kwargs:
            setattr(self, key, kwargs[key])
        openbis = Openbis(
            url=self.url,
            verify_certificates=self.verify_certificates,
            allow_http_but_do_not_use_this_in_production_and_only_within_safe_networks=self.http_only
        )
        self.openbis = openbis
        # a fresh connection always starts unauthenticated,
        # regardless of any 'status' value passed in
        self.status = "not connected"

    def is_session_active(self):
        """Return True if the openBIS session token is still valid."""
        return self.openbis.is_session_active()

    def check_status(self):
        """Refresh ``self.status`` from the actual session state."""
        if self.openbis.is_session_active():
            self.status = "connected"
        else:
            self.status = "not connected"

    def login(self, username=None, password=None):
        """Authenticate against openBIS; falls back to stored credentials."""
        if username is None:
            username = self.username
        if password is None:
            password = self.password
        self.openbis.login(
            username=username,
            password=password
        )
        # remember the working credentials for later re-logins
        self.username = username
        self.password = password
        self.status = 'connected'

    def get_info(self):
        """Return a JSON-serializable summary; the password is masked."""
        mountpoint = self.openbis.get_mountpoint()
        return {
            'name': self.name,
            'url': self.url,
            'status': self.status,
            'username': self.username,
            'password': "******",
            'isMounted': self.openbis.is_mounted(mountpoint),
            'mountpoint': mountpoint,
        }
class OpenBISConnections(IPythonHandler):
    """Handle requests to /openbis/conns: list and create connections."""

    def _notebook_dir(self):
        """Notebook root directory from the Jupyter config (single-user app
        first, then NotebookApp), defaulting to the current working dir."""
        config = self.config
        if 'SingleUserNotebookApp' in config and 'notebook_dir' in config.SingleUserNotebookApp:
            return config.SingleUserNotebookApp.notebook_dir
        if 'notebook_dir' in config.NotebookApp:
            return config.NotebookApp.notebook_dir
        return os.getcwd()

    def post(self):
        """create a new connection
        :return: a new connection object
        """
        connection_info = self.get_json_body()
        conn = register_connection(connection_info)
        if conn.username and conn.password:
            try:
                conn.login()
            except Exception:
                # best effort: a failed login leaves the connection
                # registered but "not connected"
                pass
        self.get()

    def get(self):
        """returns all available openBIS connections
        """
        infos = []
        for connection in openbis_connections.values():
            connection.check_status()
            infos.append(connection.get_info())
        self.write({
            'status': 200,
            'connections': infos,
            'notebook_dir': self._notebook_dir()
        })
class OpenBISConnectionHandler(IPythonHandler):
    """Handle the requests to /openbis/conn"""

    def _notebook_dir(self):
        """Notebook root directory from the Jupyter config (single-user app
        first, then NotebookApp), defaulting to the current working dir."""
        notebook_dir = os.getcwd()
        if 'SingleUserNotebookApp' in self.config and 'notebook_dir' in self.config.SingleUserNotebookApp:
            notebook_dir = self.config.SingleUserNotebookApp.notebook_dir
        elif 'notebook_dir' in self.config.NotebookApp:
            notebook_dir = self.config.NotebookApp.notebook_dir
        return notebook_dir

    def put(self, connection_name):
        """reconnect to a current connection
        :return: an updated connection object
        """
        data = self.get_json_body()
        try:
            conn = openbis_connections[connection_name]
        except KeyError:
            self.set_status(404)
            # bug fix: report the connection name, not the request body
            self.write({
                "reason": 'No such connection: {}'.format(connection_name)
            })
            return
        try:
            conn.login(data.get('username'), data.get('password'))
        except ConnectionError:
            self.set_status(500)
            self.write({
                "reason": "Could not establish connection to {}".format(connection_name)
            })
            return
        except ValueError:
            self.set_status(401)
            self.write({
                "reason": "Incorrect username or password for {}".format(connection_name)
            })
            return
        except Exception:
            self.set_status(500)
            self.write({
                "reason": "General Network Error"
            })
            # bug fix: without this return the success payload below
            # was also appended to the error response
            return
        self.write({
            'status': 200,
            'connection': conn.get_info(),
            # bug fix: this key used to be '' — use the same (historically
            # misspelled) key as get() so clients see one consistent name
            'noteboook_dir': self._notebook_dir()
        })

    def get(self, connection_name):
        """returns information about a connection name
        """
        try:
            conn = openbis_connections[connection_name]
        except KeyError:
            self.set_status(404)
            self.write({
                "reason": 'No such connection: {}'.format(connection_name)
            })
            return
        conn.check_status()
        self.write({
            'status': 200,
            'connection': conn.get_info(),
            # NOTE(review): 'noteboook_dir' is misspelled but may be what
            # the frontend expects — confirm before renaming
            'noteboook_dir': self._notebook_dir()
        })
import os
from urllib.parse import unquote
from notebook.base.handlers import IPythonHandler
from .connection import openbis_connections
class DataSetDownloadHandler(IPythonHandler):
    """Handle the requests for /openbis/dataset/connection/permId"""

    def download_data(self, conn, permId, downloadPath=None):
        """Download the files of dataset ``permId`` via ``conn`` into
        ``downloadPath`` and write a JSON summary (or an error) back."""
        if not conn.is_session_active():
            try:
                conn.login()
            except Exception as exc:
                self.set_status(500)
                self.write({
                    "reason": 'connection to {} could not be established: {}'.format(conn.name, exc)
                })
                return
        try:
            dataset = conn.openbis.get_dataset(permId)
        except Exception:
            self.set_status(404)
            self.write({
                "reason": 'No such dataSet found: {}'.format(permId)
            })
            return
        # dataset was found, download the data to the disk
        try:
            # return value intentionally ignored; the path is rebuilt below
            dataset.download(destination=downloadPath)
        except Exception as exc:
            self.set_status(500)
            self.write({
                "reason": 'Data for DataSet {} could not be downloaded: {}'.format(permId, exc)
            })
            return
        # return success message
        path = os.path.join(downloadPath, dataset.permId)
        self.write({
            'url': conn.url,
            'permId': dataset.permId,
            'path': path,
            'dataStore': dataset.dataStore,
            'location': dataset.physicalData.location,
            'size': dataset.physicalData.size,
            'files': dataset.file_list,
            'statusText': 'Data for DataSet {} was successfully downloaded to: {}.'.format(dataset.permId, path)
        })

    def get(self, **params):
        """Handle a request to /openbis/dataset/connection_name/permId
        download the data and return a message
        """
        try:
            conn = openbis_connections[params['connection_name']]
        except KeyError:
            self.set_status(404)
            self.write({
                "reason": 'connection {} was not found'.format(params['connection_name'])
            })
            return
        # download_data writes the response itself; nothing to return here
        self.download_data(conn=conn, permId=params['permId'], downloadPath=params['downloadPath'])
class DataSetTypesHandler(IPythonHandler):
    def get(self, **params):
        """Handle a request to /openbis/datasetTypes/connection_name
        This meta-metadata is used in the dataset upload dialog (uploadDialog.js)
        to check data directly in the UI
        Returns all datasetTypes of a given connection
        - with all assigned properties
        - with some details about the property types
        - with the vocabulary, if exists
        The result will be cached, as it is a costly operation with many fetches
        """
        try:
            conn = openbis_connections[params['connection_name']]
        except KeyError:
            self.set_status(404)
            self.write({
                "reason": 'connection {} was not found'.format(params['connection_name'])
            })
            return
        # serve the cached result if we computed it before
        if getattr(conn, 'dataset_types', False):
            self.write({
                "dataSetTypes": conn.dataset_types
            })
            return
        try:
            # bug fix: get_dataset_types() used to be called twice,
            # the first result being thrown away — fetch only once
            ds_type_dicts = []
            for dt in conn.openbis.get_dataset_types():
                dt_dict = dt.attrs.all()
                # get property assignments for every dataset-type
                # and add them in the key "propertyAssignments"
                pas = dt.get_property_assignments()
                pa_dicts = pas.df[['propertyType', 'mandatory', 'ordinal', 'section']].to_dict(orient='records')
                dt_dict['propertyAssignments'] = pa_dicts
                for pa_dict in pa_dicts:
                    # add a few more attributes to the property assignments
                    pt = conn.openbis.get_property_type(pa_dict['propertyType'])
                    pa_dict['code'] = pt.code
                    pa_dict['label'] = pt.label
                    pa_dict['description'] = pt.description
                    pa_dict['dataType'] = pt.dataType
                    # add vocabulary, if it exists, as key "terms"
                    if pt.dataType == 'CONTROLLEDVOCABULARY':
                        terms = conn.openbis.get_terms(pt.vocabulary)
                        terms_dict = terms.df[['code', 'label', 'description', 'official', 'ordinal']].to_dict(orient='records')
                        pa_dict['terms'] = terms_dict
                ds_type_dicts.append(dt_dict)
            self.write({
                "dataSetTypes": ds_type_dicts
            })
            # cache on the connection object for subsequent requests
            conn.dataset_types = ds_type_dicts
            return
        except Exception as e:
            print(e)
            self.set_status(500)
            self.write({
                "reason": 'Could not fetch dataset-types: {}'.format(e)
            })
            return
class DataSetUploadHandler(IPythonHandler):
    """Handle the POST requests for /openbis/dataset/connection_name"""

    def _notebook_dir(self):
        """Notebook root directory from the Jupyter config (single-user app
        first, then NotebookApp), defaulting to the current working dir."""
        notebook_dir = os.getcwd()
        if 'SingleUserNotebookApp' in self.config and 'notebook_dir' in self.config.SingleUserNotebookApp:
            notebook_dir = self.config.SingleUserNotebookApp.notebook_dir
        elif 'notebook_dir' in self.config.NotebookApp:
            notebook_dir = self.config.NotebookApp.notebook_dir
        return notebook_dir

    def upload_data(self, conn, data):
        """Create and save a new openBIS dataset described by ``data``.

        ``data`` is the decoded JSON body with keys: entityIdentifier,
        type, files, and optionally parents and props.  Validation
        problems are collected in ``errors`` and reported together.
        """
        if not conn.is_session_active():
            try:
                conn.login()
            except Exception as e:
                print(e)
                self.set_status(500)
                self.write({
                    "reason": 'connection to {} could not be established: {}'.format(conn.name, e)
                })
                return
        errors = []
        sample = None
        experiment = None
        if (data.get('entityIdentifier')):
            sample = None
            experiment = None
            try:
                sample = conn.openbis.get_sample(data.get('entityIdentifier'))
            except Exception:
                pass
            if sample is None:
                try:
                    experiment = conn.openbis.get_experiment(data.get('entityIdentifier'))
                except Exception:
                    experiments = conn.openbis.get_experiments(data.get('entityIdentifier'))
                    if len(experiments) == 1:
                        experiment = experiments[0]
                    else:
                        # TODO: search in all experiments with same code
                        # (but maybe different identifiers)
                        pass
            if sample is None and experiment is None:
                errors.append(
                    {
                        "entityIdentifier": 'No such sample or experiment: {}'
                        .format(data.get('entityIdentifier'))
                    }
                )
        else:
            errors.append(
                {"entityIdentifier": "please provide a sample or experiment identifier"}
            )
        parents = []
        if data.get('parents'):
            parents = data.get('parents')
        # every declared parent dataset must exist
        for parent in parents:
            try:
                conn.openbis.get_dataset(parent)
            except Exception:
                errors.append({
                    "parent": "Parent DataSet not found: {}".format(parent)
                })
        filenames = []
        notebook_dir = self._notebook_dir()
        # robustness: tolerate a payload without a 'files' key
        for filename in data.get('files') or []:
            filename = unquote(filename)
            full_filename_path = os.path.join(notebook_dir, filename)
            if os.path.isfile(full_filename_path):
                filenames.append(full_filename_path)
            else:
                errors.append({
                    "file": "File not found: {}".format(full_filename_path)
                })
        try:
            dataset = conn.openbis.new_dataset(
                type=data.get('type'),
                sample=sample,
                parents=parents,
                experiment=experiment,
                files=filenames,
            )
        except Exception as e:
            self.set_status(500)
            self.write({
                "reason": 'Error while creating the dataset: {}'.format(e)
            })
            # bug fix: without this return the code below would use the
            # undefined name `dataset` and raise a NameError
            return
        # try to set the properties
        if data.get('props'):
            props = data.get('props')
            for prop, value in props.items():
                try:
                    setattr(dataset.props, prop.lower(), value)
                except Exception as e:
                    errors.append({
                        "prop." + prop: str(e)
                    })
        # check if any mandatory property is missing
        for prop_name, prop in dataset.props._property_names.items():
            if prop['mandatory']:
                if getattr(dataset.props, prop_name) is None or getattr(dataset.props, prop_name) == "":
                    errors.append({
                        "prop." + prop_name: "is mandatory"
                    })
        # write errors back if any occurred so far
        if errors:
            self.set_status(500)
            self.write({"errors": errors})
            return
        try:
            dataset.save()
        except Exception as e:
            errors.append({
                "save": 'Error while saving the dataset: {}'.format(e)
            })
        # write errors back if they occurred
        if errors:
            self.set_status(500)
            self.write({"errors": errors})
        else:
            # ...or return a success message
            self.write({
                'status': 200,
                'statusText': 'Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId)
            })
            print('Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId))

    def post(self, **params):
        """Handle a POST to /openbis/dataset/connection_name:
        upload the given files as a new dataset.
        """
        try:
            conn = openbis_connections[params['connection_name']]
        except KeyError:
            # consistency fix: set a 404 status like the other handlers do
            self.set_status(404)
            self.write({
                "reason": 'connection {} was not found'.format(params['connection_name'])
            })
            return
        data = self.get_json_body()
        self.upload_data(conn=conn, data=data)
from notebook.utils import url_path_join
import os
import yaml
from .connection import OpenBISConnections, OpenBISConnectionHandler, register_connection
from .dataset import DataSetTypesHandler, DataSetDownloadHandler, DataSetUploadHandler
from .sample import SampleHandler
from .requirements import Requirements
def _jupyter_server_extension_paths():
return [{'module': 'jupyter-openbis-server.server'}]
def _load_configuration(paths, filename='openbis-connections.yaml'):
if paths is None:
paths = []
home = os.path.expanduser("~")
paths.append(os.path.join(home, '.jupyter'))
# look in all config file paths of jupyter
# for openbis connection files and load them
connections = []
for path in paths:
abs_filename = os.path.join(path, filename)
if os.path.isfile(abs_filename):
with open(abs_filename, 'r') as stream:
try:
config = yaml.safe_load(stream)
for connection in config['connections']:
connections.append(connection)
except yaml.YAMLexception as e:
print(e)
return None
return connections
def load_jupyter_server_extension(nb_server_app):
    """Call when the extension is loaded.

    Registers the openBIS connections found in the YAML configuration
    (and, optionally, one defined via environment variables), then wires
    up all Tornado URL handlers.

    :param nb_server_app: Handle to the Notebook webserver instance.
    """
    # bug fix: `re` is used below but was never imported in this module
    import re

    # load the configuration file
    # and register the openBIS connections.
    # If username and password is available, try to connect to the server
    connections = _load_configuration(
        paths=nb_server_app.config_file_paths,
        filename='openbis-connections.yaml'
    )
    # _load_configuration returns None when a config file failed to parse
    for connection_info in connections or []:
        conn = register_connection(connection_info)
        print("Registered: {}".format(conn.url))
        if conn.username and conn.password:
            try:
                conn.login()
                print("Successfully connected to: {}".format(conn.url))
            except ValueError:
                print("Incorrect username or password for: {}".format(conn.url))
            except Exception:
                print("Cannot establish connection to: {}".format(conn.url))

    # a connection may also be supplied via environment variables
    if "OPENBIS_URL" in os.environ and "OPENBIS_TOKEN" in os.environ:
        from urllib.parse import urlparse
        up = urlparse(os.environ["OPENBIS_URL"])
        # the session token is assumed to start with the username
        # (<username>-<code>) — TODO confirm against the token format
        match = re.search(r'(?P<username>.*)-.*', os.environ["OPENBIS_TOKEN"])
        username = match.groupdict()['username'] if match else None
        connection_info = {
            "name": up.hostname,
            "url": os.environ["OPENBIS_URL"],
            "verify_certificates": False,
            "username": username,
        }
        conn = register_connection(connection_info)
        conn.token = os.environ["OPENBIS_TOKEN"]

    # Add URL handlers to our web_app
    # see Tornado documentation: https://www.tornadoweb.org
    web_app = nb_server_app.web_app
    host_pattern = '.*$'
    base_url = web_app.settings['base_url']

    # DataSet download
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url,
                '/openbis/dataset/(?P<connection_name>.*)?/(?P<permId>.*)?/(?P<downloadPath>.*)'
            ),
            DataSetDownloadHandler
        )]
    )
    # DataSet upload
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url, '/openbis/dataset/(?P<connection_name>.*)'
            ),
            DataSetUploadHandler
        )]
    )
    # DataSet-Types
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url, '/openbis/datasetTypes/(?P<connection_name>.*)'
            ),
            DataSetTypesHandler
        )]
    )
    # DataSets for Sample identifier/permId
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url,
                '/openbis/sample/(?P<connection_name>.*)?/(?P<identifier>.*)'
            ),
            SampleHandler
        )]
    )
    # OpenBIS connections
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url,
                '/openbis/conns'
            ),
            OpenBISConnections
        )]
    )
    # Modify / reconnect to a connection
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url,
                '/openbis/conn/(?P<connection_name>.*)'
            ),
            OpenBISConnectionHandler
        )]
    )
    # Requirements / runtime files next to the notebook
    web_app.add_handlers(
        host_pattern, [(
            url_path_join(
                base_url,
                '/requirements'
            ),
            Requirements
        )]
    )
from notebook.base.handlers import IPythonHandler
import os
class Requirements(IPythonHandler):
    """Handle the requests for /requirements"""

    def _notebook_dir(self):
        """Notebook root directory from the Jupyter config (single-user app
        first, then NotebookApp), defaulting to the current working dir."""
        config = self.config
        if 'SingleUserNotebookApp' in config and 'notebook_dir' in config.SingleUserNotebookApp:
            return config.SingleUserNotebookApp.notebook_dir
        if 'notebook_dir' in config.NotebookApp:
            return config.NotebookApp.notebook_dir
        return os.getcwd()

    def post(self, **params):
        """Write the requirements list and/or runtime information into
        files located next to the notebook that sent them."""
        data = self.get_json_body()
        target_dir = self._notebook_dir()
        if 'notebook_path' in data:
            target_dir = os.path.dirname(os.path.join(target_dir, data['notebook_path']))
        if 'requirements_list' in data and 'requirements_filename' in data:
            req_filename = os.path.join(target_dir, data['requirements_filename'])
            with open(req_filename, 'w') as fh:
                fh.write(data['requirements_list'])
        if 'runtime' in data and 'runtime_filename' in data:
            runtime_filename = os.path.join(target_dir, data['runtime_filename'])
            with open(runtime_filename, 'w') as fh:
                fh.write(data['runtime'])
from notebook.base.handlers import IPythonHandler
import numpy as np
import os
from .connection import openbis_connections
from urllib.parse import parse_qs
def get_entity_for_identifier(conn, identifier):
    """Resolve ``identifier`` to a sample or, failing that, an experiment.

    Lookup errors are swallowed; returns None when neither lookup yields
    an entity.
    """
    for lookup in (conn.openbis.get_sample, conn.openbis.get_experiment):
        try:
            entity = lookup(identifier)
        except Exception:
            continue
        if entity is not None:
            return entity
    return None
def get_datasets(entity, start_with=None, count=None):
    """Fetch one page of datasets for a sample/experiment entity.

    Returns a dict with the dataset records ('datasets_dict') and the
    overall 'totalCount' reported by openBIS.
    """
    result = entity.get_datasets(start_with=start_with, count=count)
    frame = result.df
    # NaN does not serialize to JSON — swap it for None first
    frame.replace({np.nan: None}, inplace=True)
    return {
        "datasets_dict": frame.to_dict(orient='records'),
        "totalCount": result.totalCount,
    }
def get_datasets_for_identifier(conn, identifier, start_with=None, count=None):
    """Fetch a page of datasets for ``identifier``, trying it as a sample
    first and as an experiment when the sample query is empty.

    Returns a dict with the dataset records ('datasets_dict') and the
    overall 'totalCount' reported by openBIS.
    """
    result = conn.openbis.get_datasets(sample=identifier, start_with=start_with, count=count)
    if len(result) == 0:
        result = conn.openbis.get_datasets(experiment=identifier, start_with=start_with, count=count)
    frame = result.df
    # NaN does not serialize to JSON — swap it for None first
    frame.replace({np.nan: None}, inplace=True)
    return {
        "datasets_dict": frame.to_dict(orient='records'),
        "totalCount": result.totalCount,
    }
class SampleHandler(IPythonHandler):
    """Handle the requests for /openbis/sample/connection/permId"""

    def get(self, **params):
        """Handle a request to /openbis/sample/connection_name/permId:
        respond with the page of datasets attached to the identifier."""
        conn = openbis_connections.get(params['connection_name'])
        if conn is None:
            self.set_status(500)
            self.write({
                "reason": 'connection {} was not found'.format(
                    params['connection_name']
                )
            })
            return
        if not conn.is_session_active():
            try:
                conn.login()
            except Exception as exc:
                self.set_status(500)
                self.write({
                    "reason": 'connection to {} could not be established: {}'.format(conn.name, exc)
                })
                return
        # paging parameters arrive in the query string as strings
        querystring = parse_qs(self.request.query)
        start_with = querystring.get('start_with', ['0'])[0]
        count = querystring.get('count', ['10'])[0]
        datasets = get_datasets_for_identifier(
            conn, params['identifier'],
            start_with=start_with, count=count
        )
        self.set_status(200)
        self.write({
            "dataSets": datasets.get('datasets_dict'),
            "start_with": start_with,
            "count": count,
            "totalCount": datasets.get('totalCount'),
            "cwd": os.getcwd()
        })
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment