Commit 2a7bdc83 authored by Swen Vermeul

Merge tag '0.5.0' into develop

split into two projects: jupyter-openbis-extension and jupyter-openbis-server.
parents 4ec17696 f8da80dc
## new in jupyter-openbis-extension 0.5.0
- split the project into jupyter-openbis-server and jupyter-openbis-extension
- jupyter-openbis-server is the Python part which talks to both openBIS and the Jupyter extension
- jupyter-openbis-server is also used by the jupyterlab-openbis notebook extension
## new in jupyter-openbis-extension 0.4.0
- made it compatible with pybis-1.9.x
@@ -5,7 +5,7 @@ include *.md
include LICENSE
# Include all files
recursive-include jupyter-openbis-extension *.py *.js
recursive-include jupyter-openbis-extension *.js
recursive-include jupyter-config *.json
recursive-include jupyter-openbis-extension/static *.js
{
"NotebookApp": {
"nbserver_extensions": {
"jupyter-openbis-extension.server": true
}
}
}
name = 'jupyter-openbis-extension.server'
__author__ = 'Swen Vermeul'
__email__ = 'swen@ethz.ch'
__version__ = '0.4.0'
def _jupyter_server_extension_paths():
return [{
"module": "jupyter-openbis-extension.server"
}]
# Jupyter Extension points
def _jupyter_nbextension_paths():
return [{
'name': 'jupyter-openbis-extension',
'label': 'Jupyter openBIS extension',
'section': "notebook",
# the path relative to the `jupyter-openbis-extension` directory containing the JavaScript
'src': "static",
# directory in the `nbextension/` namespace
'dest': "openbis",
# _also_ in the `nbextension/` namespace
'require' : "openbis/main"
}]
def load_jupyter_server_extension(nbapp):
nbapp.log.info("jupyter-openbis-extension module enabled!")
import os
from pybis import Openbis
from notebook.base.handlers import IPythonHandler
openbis_connections = {}
def register_connection(connection_info):
conn = OpenBISConnection(
name = connection_info.get('name'),
url = connection_info.get('url'),
verify_certificates = connection_info.get('verify_certificates', False),
username = connection_info.get('username'),
password = connection_info.get('password'),
http_only = connection_info.get('http_only', False),
status = 'not connected',
)
openbis_connections[conn.name] = conn
return conn
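# A minimal usage sketch of register_connection() (illustration only; the
# connection values below are hypothetical). The same keys are accepted from
# the YAML configuration file and from the POST body of /openbis/conns:
#
#   conn = register_connection({
#       'name': 'local_openbis',
#       'url': 'https://openbis.example.com',
#       'username': 'jdoe',
#       'password': 'secret',
#       'verify_certificates': False,
#       'http_only': False,
#   })
#   conn.login()            # opens a pybis session with the stored credentials
#   print(conn.get_info())  # name, url, status, username, password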
class OpenBISConnection:
"""register an openBIS connection
"""
def __init__(self, **kwargs):
for needed_key in ['name', 'url']:
if needed_key not in kwargs:
raise KeyError("{} is missing".format(needed_key))
for key in kwargs:
setattr(self, key, kwargs[key])
openbis = Openbis(
url = self.url,
verify_certificates = self.verify_certificates,
allow_http_but_do_not_use_this_in_production_and_only_within_safe_networks = self.http_only
)
self.openbis = openbis
self.status = "not connected"
def is_session_active(self):
return self.openbis.is_session_active()
def check_status(self):
if self.openbis.is_session_active():
self.status = "connected"
else:
self.status = "not connected"
def login(self, username=None, password=None):
if username is None:
username=self.username
if password is None:
password=self.password
self.openbis.login(
username = username,
password = password
)
# store username and password in memory
self.username = username
self.password = password
self.status = 'connected'
def get_info(self):
return {
'name' : self.name,
'url' : self.url,
'status' : self.status,
'username': self.username,
'password': self.password,
}
class OpenBISConnections(IPythonHandler):
def _notebook_dir(self):
notebook_dir = os.getcwd()
if 'SingleUserNotebookApp' in self.config and 'notebook_dir' in self.config.SingleUserNotebookApp:
notebook_dir = self.config.SingleUserNotebookApp.notebook_dir
elif 'notebook_dir' in self.config.NotebookApp:
notebook_dir = self.config.NotebookApp.notebook_dir
return notebook_dir
def post(self):
"""create a new connection
:return: a new connection object
"""
data = self.get_json_body()
conn = register_connection(data)
if conn.username and conn.password:
try:
conn.login()
except Exception:
pass
self.get()
return
def get(self):
"""returns all available openBIS connections
"""
connections= []
for conn in openbis_connections.values():
conn.check_status()
connections.append(conn.get_info())
self.write({
'status' : 200,
'connections' : connections,
'notebook_dir' : self._notebook_dir()
})
return
class OpenBISConnectionHandler(IPythonHandler):
"""Handle the requests to /openbis/conn
"""
def _notebook_dir(self):
notebook_dir = os.getcwd()
if 'SingleUserNotebookApp' in self.config and 'notebook_dir' in self.config.SingleUserNotebookApp:
notebook_dir = self.config.SingleUserNotebookApp.notebook_dir
elif 'notebook_dir' in self.config.NotebookApp:
notebook_dir = self.config.NotebookApp.notebook_dir
return notebook_dir
def put(self, connection_name):
"""reconnect to a current connection
:return: an updated connection object
"""
data = self.get_json_body()
try:
conn = openbis_connections[connection_name]
except KeyError:
self.set_status(404)
self.write({
"reason" : 'No such connection: {}'.format(data)
})
return
try:
conn.login(data.get('username'), data.get('password'))
except ConnectionError:
self.set_status(500)
self.write({
"reason": "Could not establish connection to {}".format(connection_name)
})
return
except ValueError:
self.set_status(401)
self.write({
"reason": "Incorrect username or password for {}".format(connection_name)
})
return
except Exception:
self.set_status(500)
self.write({
"reason": "General Network Error"
})
return
self.write({
'status' : 200,
'connection' : conn.get_info(),
'notebook_dir' : self._notebook_dir()
})
def get(self, connection_name):
"""returns information about a connection name
"""
try:
conn = openbis_connections[connection_name]
except KeyError:
self.set_status(404)
self.write({
"reason" : 'No such connection: {}'.format(connection_name)
})
return
conn.check_status()
self.write({
'status' : 200,
'connection' : conn.get_info(),
'notebook_dir' : self._notebook_dir()
})
return
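# A minimal client-side sketch of the two connection endpoints (assumptions:
# a local notebook server at http://localhost:8888 with token-based auth;
# depending on the server settings an _xsrf token may also be required for PUT):
#
#   import requests
#   base = 'http://localhost:8888'
#   auth = {'token': '<jupyter-token>'}
#
#   # GET /openbis/conns -> {'status': 200, 'connections': [...], 'notebook_dir': ...}
#   print(requests.get(base + '/openbis/conns', params=auth).json())
#
#   # PUT /openbis/conn/<name> logs in again with the given credentials
#   requests.put(base + '/openbis/conn/local_openbis', params=auth,
#                json={'username': 'jdoe', 'password': 'secret'})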
import os
from urllib.parse import unquote
from notebook.base.handlers import IPythonHandler
from .connection import openbis_connections
class DataSetDownloadHandler(IPythonHandler):
"""Handle the requests for /openbis/dataset/connection/permId"""
def download_data(self, conn, permId, downloadPath=None):
if not conn.is_session_active():
try:
conn.login()
except Exception as exc:
self.set_status(500)
self.write({
"reason": 'connection to {} could not be established: {}'.format(conn.name, exc)
})
return
try:
dataset = conn.openbis.get_dataset(permId)
except Exception as exc:
self.set_status(404)
self.write({
"reason": 'No such dataSet found: {}'.format(permId)
})
return
# dataset was found, download the data to the disk
try:
destination = dataset.download(destination=downloadPath)
except Exception as exc:
self.set_status(500)
self.write({
"reason": 'Data for DataSet {} could not be downloaded: {}'.format(permId, exc)
})
return
# return success message
path = os.path.join(downloadPath, dataset.permId)
self.write({
'url' : conn.url,
'permId' : dataset.permId,
'path' : path,
'dataStore' : dataset.dataStore,
'location' : dataset.physicalData.location,
'size' : dataset.physicalData.size,
'files' : dataset.file_list,
'statusText': 'Data for DataSet {} was successfully downloaded to: {}.'.format(dataset.permId, path)
})
def get(self, **params):
"""Handle a request to /openbis/dataset/connection_name/permId
download the data and return a message
"""
try:
conn = openbis_connections[params['connection_name']]
except KeyError:
self.set_status(404)
self.write({
"reason": 'connection {} was not found'.format(params['connection_name'])
})
return
results = self.download_data(conn=conn, permId=params['permId'], downloadPath=params['downloadPath'])
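# A minimal client-side sketch of the download endpoint (assumptions: a local
# notebook server at http://localhost:8888, a hypothetical permId; the last
# URL segment is the downloadPath on the notebook machine):
#
#   import requests
#   resp = requests.get(
#       'http://localhost:8888/openbis/dataset/local_openbis/20200101000000000-1/data',
#       params={'token': '<jupyter-token>'},
#   )
#   # on success the response contains url, permId, path, dataStore,
#   # location, size, files and a human-readable statusText
#   print(resp.json()['statusText'])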
class DataSetTypesHandler(IPythonHandler):
def get(self, **params):
"""Handle a request to /openbis/datasetTypes/connection_name
This meta-metadata is used in the dataset upload dialog (uploadDialog.js)
to check data directly in the UI
Returns all datasetTypes of a given connection
- with all assigned properties
- with some details about the property types
- with the vocabulary, if exists
The result will be cached, as it is a costly operation with many fetches
"""
try:
conn = openbis_connections[params['connection_name']]
except KeyError:
self.set_status(404)
self.write({
"reason": 'connection {} was not found'.format(params['connection_name'])
})
return
if getattr(conn, 'dataset_types', False):
self.write({
"dataSetTypes": conn.dataset_types
})
return
try:
# get all dataset types (costly, hence fetched once and cached below)
dataset_types = conn.openbis.get_dataset_types()
ds_type_dicts = []
for dt in dataset_types:
dt_dict = dt.attrs.all()
# get property assignments for every dataset-type
# and add them in the key «propertyAssignments»
pas = dt.get_property_assignments()
pa_dicts = pas.df[['propertyType','mandatory','ordinal','section']].to_dict(orient='records')
dt_dict['propertyAssignments'] = pa_dicts
for pa_dict in pa_dicts:
# add a few more attributes to the property assignments
pt = conn.openbis.get_property_type(pa_dict['propertyType'])
pa_dict['code'] = pt.code
pa_dict['label'] = pt.label
pa_dict['description'] = pt.description
pa_dict['dataType'] = pt.dataType
# add vocabulary, if exists, as key «terms»
if pt.dataType == 'CONTROLLEDVOCABULARY':
terms = conn.openbis.get_terms(pt.vocabulary)
terms_dict = terms.df[['code','label','description','official','ordinal']].to_dict(orient='records')
pa_dict['terms'] = terms_dict
ds_type_dicts.append(dt_dict)
self.write({
"dataSetTypes": ds_type_dicts
})
conn.dataset_types = ds_type_dicts
return
except Exception as e:
print(e)
self.set_status(500)
self.write({
"reason":'Could not fetch dataset-types: {}'.format(e)
})
return
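# The JSON answered by /openbis/datasetTypes/<connection_name> looks roughly
# like this (a sketch assembled from the handler above; the values are
# hypothetical):
#
#   {
#       "dataSetTypes": [
#           {
#               "code": "RAW_DATA",
#               ...                          # other dataset-type attributes
#               "propertyAssignments": [
#                   {
#                       "propertyType": "NOTES",
#                       "mandatory": False,
#                       "ordinal": 1,
#                       "section": "General",
#                       "code": "NOTES",
#                       "label": "Notes",
#                       "description": "Free text notes",
#                       "dataType": "VARCHAR",
#                       # "terms": [...]      # only for CONTROLLEDVOCABULARY types
#                   }
#               ]
#           }
#       ]
#   }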
class DataSetUploadHandler(IPythonHandler):
"""Handle the POST requests for /openbis/dataset/connection_name"""
def _notebook_dir(self):
notebook_dir = os.getcwd()
if 'SingleUserNotebookApp' in self.config and 'notebook_dir' in self.config.SingleUserNotebookApp:
notebook_dir = self.config.SingleUserNotebookApp.notebook_dir
elif 'notebook_dir' in self.config.NotebookApp:
notebook_dir = self.config.NotebookApp.notebook_dir
return notebook_dir
def upload_data(self, conn, data):
if not conn.is_session_active():
try:
conn.login()
except Exception as e:
print(e)
self.set_status(500)
self.write({
"reason": 'connection to {} could not be established: {}'.format(conn.name, e)
})
return
errors = []
sample = None
experiment = None
if (data.get('entityIdentifier')):
sample = None
experiment = None
try:
sample = conn.openbis.get_sample(data.get('entityIdentifier'))
except Exception as e:
pass
if sample is None:
try:
experiment = conn.openbis.get_experiment(data.get('entityIdentifier'))
except Exception as e:
pass
if sample is None and experiment is None:
errors.append(
{
"entityIdentifier" : 'No such sample or experiment: {}'
.format(data.get('entityIdentifier'))
}
)
else:
errors.append(
{"entityIdentifier": "please provide a sample or experiment identifier"}
)
parents = []
if data.get('parents'):
parents = data.get('parents')
for parent in parents:
try:
conn.openbis.get_dataset(parent)
except Exception as e:
errors.append({
"parent": "Parent DataSet not found: {}".format(parent)
})
filenames = []
notebook_dir = self._notebook_dir()
for filename in data.get('files'):
filename = unquote(filename)
full_filename_path = os.path.join(notebook_dir, filename)
if os.path.isfile(full_filename_path):
filenames.append(full_filename_path)
else:
errors.append({
"file": "File not found: {}".format(full_filename_path)
})
try:
dataset = conn.openbis.new_dataset(
type = data.get('type'),
sample = sample,
parents = parents,
experiment = experiment,
files = filenames,
)
except Exception as e:
self.set_status(500)
self.write({
"reason": 'Error while creating the dataset: {}'.format(e)
})
return
# try to set the properties
if data.get('props'):
props = data.get('props')
for prop, value in props.items():
try:
setattr(dataset.props, prop.lower(), value)
except Exception as e:
errors.append({
"prop."+prop : str(e)
})
# check if any mandatory property is missing
for prop_name, prop in dataset.props._property_names.items():
if prop['mandatory']:
if getattr(dataset.props, prop_name) is None or getattr(dataset.props, prop_name) == "":
errors.append({
"prop."+prop_name : "is mandatory"
})
# write errors back if any occurred so far
if errors:
self.set_status(500)
self.write({"errors": errors})
return
try:
dataset.save()
except Exception as e:
errors.append({
"save": 'Error while saving the dataset: {}'.format(e)
})
# write errors back if they occurred
if errors:
self.set_status(500)
self.write({"errors": errors})
else:
# ...or return a success message
self.write({
'status': 200,
'statusText': 'Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId)
})
print('Jupyter Notebook was successfully uploaded to: {} with permId: {}'.format(conn.name, dataset.permId))
def post(self, **params):
"""Handle a request to /openbis/dataset/connection_name/permId
download the data and return a message
"""
try:
conn = openbis_connections[params['connection_name']]
except KeyError:
self.set_status(404)
self.write({
"reason": 'connection {} was not found'.format(params['connection_name'])
})
return
data = self.get_json_body()
self.upload_data(conn=conn,data=data)
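# A minimal sketch of an upload request (assumptions: hypothetical dataset
# type, identifier and file paths; the handler reads exactly these keys from
# the JSON body, and file paths are resolved relative to the notebook_dir):
#
#   import requests
#   requests.post(
#       'http://localhost:8888/openbis/dataset/local_openbis',
#       params={'token': '<jupyter-token>'},
#       json={
#           'type': 'JUPYTER_NOTEBOOK',                    # dataset type code
#           'entityIdentifier': '/DEFAULT/DEFAULT/EXP1',   # sample or experiment
#           'parents': [],                                 # optional parent dataSet permIds
#           'files': ['notebooks/analysis.ipynb'],         # relative to the notebook_dir
#           'props': {'notes': 'uploaded from Jupyter'},   # optional properties
#       },
#   )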
from notebook.base.handlers import IPythonHandler
import numpy as np
import os
from .connection import openbis_connections
from urllib.parse import parse_qs
def get_entity_for_identifier(conn, identifier):
entity = None
try:
entity = conn.openbis.get_sample(identifier)
except Exception as exc:
pass
if entity is None:
try:
entity = conn.openbis.get_experiment(identifier)
except Exception as exc:
pass
return entity
def get_datasets(entity, start_with=None, count=None):
datasets = entity.get_datasets(start_with=start_with, count=count)
totalCount = datasets.totalCount
df = datasets.df
df.replace({np.nan:None}, inplace=True) # replace NaN with None, otherwise we cannot convert it correctly
datasets_dict = df.to_dict(orient='records') # to_dict() cannot serialize NaN values
return {
"datasets_dict": datasets_dict,
"totalCount": totalCount
}
class SampleHandler(IPythonHandler):
"""Handle the requests for /openbis/sample/connection/permId"""
def get(self, **params):
"""Handle a request to /openbis/sample/connection_name/permId
download the dataset list and return a message
"""
try:
conn = openbis_connections[params['connection_name']]
except KeyError:
self.set_status(500)
self.write({
"reason" : 'connection {} was not found'.format(
params['connection_name']
)
})
return
if not conn.is_session_active():
try:
conn.login()
except Exception as exc:
self.set_status(500)
self.write({
"reason" : 'connection to {} could not be established: {}'.format(conn.name, exc)
})
return
entity = get_entity_for_identifier(conn, params['identifier'])
if entity is None:
self.set_status(404)
self.write({
"reason" : 'No such Sample or Experiment: {}'.format(params['identifier'])
})
return None
querystring = parse_qs(self.request.query)
start_with = querystring.get('start_with', ['0'])[0]
count = querystring.get('count', ['10'])[0]
datasets = get_datasets(entity, start_with=start_with, count=count)
if datasets is not None:
self.set_status(200)
self.write({
"dataSets" : datasets.get('datasets_dict'),
"entity_attrs": entity.attrs.all(),
"entity_props": entity.props.all(),
"start_with" : start_with,
"count" : count,
"totalCount" : datasets.get('totalCount'),
"cwd" : os.getcwd()
})
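# A minimal client-side sketch of the dataset-listing endpoint (assumptions:
# hypothetical identifier and pagination values):
#
#   import requests
#   resp = requests.get(
#       'http://localhost:8888/openbis/sample/local_openbis/20200101000000000-1',
#       params={'token': '<jupyter-token>', 'start_with': 0, 'count': 10},
#   )
#   # the response contains dataSets, entity_attrs, entity_props,
#   # start_with, count, totalCount and cwd
#   print(resp.json()['totalCount'])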
from notebook.utils import url_path_join
import os
import yaml
from .connection import OpenBISConnections, OpenBISConnectionHandler, register_connection
from .dataset import DataSetTypesHandler, DataSetDownloadHandler, DataSetUploadHandler
from .sample import SampleHandler
def _jupyter_server_extension_paths():
return [{'module': 'jupyter-openbis-extension.server'}]
def _load_configuration(paths, filename='openbis-connections.yaml'):
if paths is None:
paths = []
home = os.path.expanduser("~")
paths.append(os.path.join(home, '.jupyter'))
# look in all config file paths of jupyter
# for openbis connection files and load them
connections = []
for path in paths:
abs_filename = os.path.join(path, filename)
if os.path.isfile(abs_filename):
with open(abs_filename, 'r') as stream:
try:
config = yaml.safe_load(stream)
for connection in config['connections']:
connections.append(connection)
except yaml.YAMLError as e:
print(e)
return None
return connections
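# The expected layout of openbis-connections.yaml in one of the Jupyter config
# paths or in ~/.jupyter (a sketch with hypothetical values; each entry under
# 'connections' is passed to register_connection()):
#
#   connections:
#       - name: local_openbis
#         url: https://openbis.example.com
#         username: jdoe
#         password: secret
#         verify_certificates: true
#         http_only: false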
def load_jupyter_server_extension(nb_server_app):
"""Call when the extension is loaded.
:param nb_server_app: Handle to the Notebook webserver instance.
"""
# load the configuration file
# and register the openBIS connections.
# If username and password are available, try to connect to the server
connections = _load_configuration(
paths = nb_server_app.config_file_paths,
filename = 'openbis-connections.yaml'
)
for connection_info in connections:
conn = register_connection(connection_info)
print("Registered: {}".format(conn.url))
if conn.username and conn.password:
try:
conn.login()
print("Successfully connected to: {}".format(conn.url))
except ValueError:
print("Incorrect username or password for: {}".format(conn.url))
except Exception:
print("Cannot establish connection to: {}".format(conn.url))
# Add URL handlers to our web_app
# see Tornado documentation: https://www.tornadoweb.org
web_app = nb_server_app.web_app
host_pattern = '.*$'
base_url = web_app.settings['base_url']
# DataSet download
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url,
'/openbis/dataset/(?P<connection_name>.*)?/(?P<permId>.*)?/(?P<downloadPath>.*)'
),
DataSetDownloadHandler
)]
)
# DataSet upload
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url, '/openbis/dataset/(?P<connection_name>.*)'
),
DataSetUploadHandler
)]
)
# DataSet-Types
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url, '/openbis/datasetTypes/(?P<connection_name>.*)'
),
DataSetTypesHandler
)]
)
# DataSets for Sample identifier/permId
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url,
'/openbis/sample/(?P<connection_name>.*)?/(?P<identifier>.*)'
),
SampleHandler
)]
)
# OpenBIS connections
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url,
'/openbis/conns'
),
OpenBISConnections
)]
)
# Modify / reconnect to a connection
web_app.add_handlers(
host_pattern, [(
url_path_join(
base_url,
'/openbis/conn/(?P<connection_name>.*)'
),
OpenBISConnectionHandler
)]
)
define([],
function () {
// noinspection JSAnnotator
return {
// connection dialog
connection: {
@@ -26,7 +27,15 @@ define([],
entity: null,
// openBIS v3 connection
openbisService : null
openbisService : null,
//runtime environment (e.g. Python version) and requirements (package list)
requirements_list : null,
requirements_filename : null,
runtime_filename : null,
runtime : null
}
}
)
\ No newline at end of file
@@ -270,20 +270,25 @@ define([
filenameCell.style.width = "100%"
filenameCell.style.cursor = "pointer"
var checkbox = document.createElement("INPUT")
checkbox.type = "checkbox"
checkbox.value = file.path
checkbox.checked = state.selectedFiles.includes(file.path) ? true: false
checkbox.onclick = registerFile
checkboxes.push(checkbox)
checkboxCell.appendChild(checkbox)
if (file.type === "directory") {
iconCell.className = "item_icon folder_icon icon-fixed-width"
filenameCell.onclick = function () {
get_file_list(env, container, file.path)
}
iconCell.onclick = function () {
checkbox.checked = !checkbox.checked
registerFile.call(checkbox)
}
}
else {
var checkbox = document.createElement("INPUT")
checkbox.type = "checkbox"
checkbox.value = file.path
checkbox.checked = state.selectedFiles.includes(file.path) ? true: false
checkbox.onclick = registerFile
checkboxes.push(checkbox)
checkboxCell.appendChild(checkbox)
if (file.type === "notebook") {
iconCell.className = "item_icon notebook_icon icon-fixed-width"
@@ -409,7 +414,7 @@ define([
if (! files.includes(env.notebook.notebook_path)) {
files.push(env.notebook.notebook_path)
}
console.log(files)
//console.log(files)
var props = {}
for (input of $('#upload-input-fields').find('input')) {
@@ -494,6 +499,89 @@ define([
keyboard_manager: env.notebook.keyboard_manager
}
var notebook = IPython.notebook
var code_for_requirements = ""
var requirements_filename = ""
var code_for_runtime = ""
var runtime_filename = ""
if (notebook.metadata.kernelspec.language == "python") {
code_for_requirements = "import pkg_resources; print(\"\\n\".join([\"{}=={}\".format(i.key, i.version) for i in pkg_resources.working_set]))"
code_for_runtime = "import sys; print('python-' + str(sys.version_info[0]) + '.' + str(sys.version_info[1]))"
requirements_filename = "requirements.txt"
runtime_filename = "runtime.txt"
}
else if (notebook.metadata.kernelspec.language == "R") {
code_for_requirements = ""
requirements_filename = "runtime.txt"
}
function save_requirements(data) {
notebook.metadata.language_info.requirements_list = data
notebook.metadata.language_info.requirements_filename = requirements_filename
state.requirements_list = data
state.requirements_filename = requirements_filename
}
function save_runtime(data) {
notebook.metadata.language_info.runtime = data
notebook.metadata.language_info.runtime_filename = runtime_filename
state.runtime = data
state.runtime_filename = runtime_filename
}
var req_callback = {
iopub: {
output: (data) => save_requirements(data.content.text.trim())
}
};
var rt_callback = {
iopub: {
output: (data) => save_runtime(data.content.text.trim())
}
};
var kernel = IPython.notebook.kernel
kernel.execute(code_for_requirements, req_callback)
kernel.execute(code_for_runtime, rt_callback)
function send_runtime_requirements(state) {
var endpoint = env.notebook.base_url + 'requirements'
var notebook_path = IPython.notebook.notebook_path
var body = {
"notebook_path": notebook_path,
"requirements_list": state.requirements_list,
"requirements_filename": state.requirements_filename,
"runtime": state.runtime,
"runtime_filename": state.runtime_filename
}
var xsrf_token = common.getCookie('_xsrf')
fetch(endpoint, {
method: "POST",
headers: {
"Content-Type": "application/json",
"X-XSRFToken": xsrf_token,
"credentials": "same-origin",
},
body: JSON.stringify(body)
})
.then(function (response) {
if (response.ok) {
//alert(response.status)
}
else {
//alert(response.status)
}
})
.catch(
error => console.error(
"Error while attempting to write requirement files: ", error
)
)
}
send_runtime_requirements(state)
if (env.notebook.dirty === true) {
env.notebook.save_notebook()
.then(function () {
@@ -11,7 +11,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
setup(
name='jupyter-openbis-extension',
version= '0.4.0',
version= '0.5.0',
author='Swen Vermeul | ID SIS | ETH Zürich',
author_email='swen@ethz.ch',
description='Extension for Jupyter notebooks to connect to openBIS and download/upload datasets, including the notebook itself',
@@ -23,7 +23,7 @@ setup(
install_requires=[
'jupyter-nbextensions-configurator',
'jupyter',
'pybis>=1.9.5',
'jupyter-openbis-server',
'numpy',
'tornado==5.1.1',
],
@@ -113,10 +113,6 @@ setup(
("etc/jupyter/nbconfig/notebook.d", [
"jupyter-config/nbconfig/notebook.d/jupyter_openbis_extension.json"
]),
# like `jupyter serverextension enable --sys-prefix`
("etc/jupyter/jupyter_notebook_config.d", [
"jupyter-config/jupyter_notebook_config.d/jupyter_openbis_extension.json"
])
],
zip_safe=False,
)