diff --git a/src/core-plugins/jupyter-api/1/as/initialize-master-data.py b/src/core-plugins/jupyter-api/1/as/initialize-master-data.py
deleted file mode 100644
index ff8b7629d7cbbf8bad029b2c79c9b6e1483d1c9f..0000000000000000000000000000000000000000
--- a/src/core-plugins/jupyter-api/1/as/initialize-master-data.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#
-# Copyright 2016 ETH Zuerich, Scientific IT Services
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##
-## Configuration
-##
-import sys
-
-# MasterDataRegistrationTransaction Class
-import os
-import ch.systemsx.cisd.openbis.generic.server.jython.api.v1.DataType as DataType
-
-##
-## Globals
-##
-propertiesCache = {};
-samplesCache = {};
-tr = service.transaction();
-
-##
-## API Facade
-##
-	
-	
-def createDataSetTypeWithProperties(dataSetCode, description, properties):
-	newDataSet = tr.getOrCreateNewDataSetType(dataSetCode);
-	newDataSet.setDescription(description);
-	addProperties(newDataSet, properties);
-	
-def addProperties(entity, properties):
-	for property in properties:
-		addProperty(entity, property[0], property[1], property[2], property[3], property[4], property[5], property[6], property[7]);
-	
-def addProperty(entity, propertyCode, section, propertyLabel, dataType, vocabularyCode, propertyDescription, managedScript, dynamicScript):
-	property = None;
-	
-	if propertyCode in propertiesCache:
-		property = propertiesCache[propertyCode];
-	else:
-		property = createProperty(propertyCode, dataType, propertyLabel, propertyDescription, vocabularyCode);
-	
-	propertyAssignment = tr.assignPropertyType(entity, property);
-	if section is not None:
-		propertyAssignment.setSection(section);
-	propertyAssignment.setShownEdit(True);
-	
-	if managedScript != None:
-		propertyAssignment.setManaged(True);
-		propertyAssignment.setScriptName(managedScript);
-	if dynamicScript != None:
-		propertyAssignment.setDynamic(True);
-		propertyAssignment.setShownEdit(False);
-		propertyAssignment.setScriptName(dynamicScript);
-
-def createProperty(propertyCode, dataType, propertyLabel, propertyDescription, vocabularyCode):
-	property = tr.getOrCreateNewPropertyType(propertyCode, dataType);
-	property.setDescription(propertyDescription);
-	property.setLabel(propertyLabel);
-	propertiesCache[propertyCode] = property;
-	if dataType == DataType.CONTROLLEDVOCABULARY:
-		property.setVocabulary(vocabulariesCache[vocabularyCode]);
-	return property;
-
-def initJupyterMasterData():
-	##
-	## Property Types for annotations
-	##
-		
-	##
-	## DataSet Types
-	##
-	createDataSetTypeWithProperties("JUPYTER_CONTAINER", "Jupyter Analysis Results", [
-		["NAME", None, "Name", DataType.VARCHAR, None,	"Name", None, None],
-		["DESCRIPTION", None, "Description", DataType.MULTILINE_VARCHAR, None, "A Description", None, None],
-	]);
-	
-	createDataSetTypeWithProperties("JUPYTER_RESULT", "Analysis Results Files", []);
-	createDataSetTypeWithProperties("JUPYTER_NOTEBOOK", "Analysis Notebook Files", []);
-	
-	
-initJupyterMasterData();
\ No newline at end of file
diff --git a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/dataset-uploader-api.py b/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/dataset-uploader-api.py
deleted file mode 100644
index f34791fcf099a27701b1b000a500e7037c76b5e6..0000000000000000000000000000000000000000
--- a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/dataset-uploader-api.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#
-# Copyright 2016 ETH Zuerich, Scientific IT Services
-#
-# Licensed under the Apache License, Version 2.0 (the "License")
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# IDataSetRegistrationTransactionV2 Class
-from ch.systemsx.cisd.openbis.generic.shared.api.v1.dto import SearchCriteria
-from ch.systemsx.cisd.openbis.generic.shared.api.v1.dto.SearchCriteria import MatchClause, SearchOperator, MatchClauseAttribute
-from ch.systemsx.cisd.openbis.dss.generic.shared import ServiceProvider
-
-from org.apache.commons.io import IOUtils
-from java.io import File
-from java.io import FileOutputStream
-from java.lang import System
-#from net.lingala.zip4j.core import ZipFile
-from ch.systemsx.cisd.common.exceptions import UserFailureException
-
-import time
-import subprocess
-import os
-import re
-import sys
-import shutil
-import errno
-
-
-def getSampleByIdentifier(transaction, identifier):
-    sample = transaction.getSampleForUpdate(identifier)
-    if sample is None:
-        raise UserFailureException("no sample found with this identifier: {}".format(identifier))
-    return sample
-
-def getExperimentByIdentifier(transaction, identifier):
-    experiment = transaction.getExperimentForUpdate(identifier)
-    if experiment is None:
-        raise UserFailureException("no experiment found with this identifier: {}".format(identifier))
-
-    return experiment
-
-def get_dataset_for_name(transaction, dataset_name):
-
-    search_service = transaction.getSearchService()
-    criteria = SearchCriteria()
-    criteria.addMatchClause(MatchClause.createPropertyMatch('NAME', dataset_name))
-    found = list(search_service.searchForDataSets(criteria))
-    if len(found) == 1:
-        #print("DataSetCode of found dataset = " + found[0].getDataSetCode())
-        return transaction.getDataSetForUpdate(found[0].getDataSetCode())
-    else:
-        return None
-
-
-def get_dataset_for_permid(transaction, permid):
-
-    search_service = transaction.getSearchService()
-    criteria = SearchCriteria()
-    criteria.addMatchClause(MatchClause.createAttributeMatch(MatchClauseAttribute.CODE, permid));
-
-    found = list(search_service.searchForDataSets(criteria))
-    if len(found) == 1:
-        return found[0]
-    else:
-        return None
-
-
-def process(transaction, parameters, tableBuilder):
-    ''' 
-    This method is called from openBIS DSS.
-    The transaction object has a number of methods described in ...
-    The parameters are passed with the createReportFromAggregationService method
-    and need to be accessed like this:
-       parameters.get('my_param')
-    tableBuilder is needed to create an appropiate return message.
-    A number of magic variables are present, described in PluginScriptRunnerFactory:
-    - userSessionToken : the Session Token used by every call
-    - userId           : the username
-    - searchService    :
-    - searchServiceUnfiltered :
-    - queryService     :
-    - mailService      :
-    - authorizationService :
-    - contentProvider  :
-    - contentProviderUnfiltered 
-
-    '''
-    transaction.setUserId(userId)
-    #print(dir())
-    ## any print statements is written to openbis/servers/datastore_server/log/startup_log.txt
-    #print('userSessionToken: ' + userSessionToken)
-
-
-    # get sample to connect the container to
-    sample = None
-    sampleId = parameters.get("sampleId")
-    if sampleId is not None:
-        #print('looking for sample with identifier: ' + sampleId['identifier'])
-        sample = getSampleByIdentifier(transaction, sampleId)
-
-    experiment = None
-    experimentId = parameters.get("experimentId")
-    if experimentId is not None:
-        #print('looking for experiment with identifier: ' + sampleId['identifier'])
-        experiment = getExperimentByIdentifier(transaction, experimentId)
-
-    if sample is None and experiment is None:
-        raise UserFailureException("to create a DataSet, either a sampleId or an experimentId must be present")
-
-    parent_datasets = []
-    if parameters.get('parentIds') is not None:
-        for parentId in parameters.get('parentIds'):
-            parent_datasets.append(parentId)
-    #print("parent_datasets = " + str(parent_datasets))
-
-    everything_ok = True
-
-    permId = None
-    dataset_codes= []
-
-    if parameters.get("dataSets") is not None:
-        for ds in parameters.get("dataSets"):
-            dataset_code = register_dataset(
-                transaction, 
-                ds.get("dataSetType"),
-                sample, 
-                experiment,
-                ds.get("parentIds"),
-                ds.get("properties"),
-                ds.get("sessionWorkspaceFolder"),
-                ds.get("fileNames"),
-                ds.get("folder")
-            )
-            dataset_codes.append(dataset_code)
-
-
-    # put the newly created dataset into a container
-    if parameters.get("containers") is not None:
-        #print("...creating container...")
-        for container in parameters.get("containers"):
-            new_cont = register_container(
-                transaction,
-                container.get("dataSetType"),
-                sample,
-                experiment,
-                parent_datasets,
-                container.get("properties"),
-                dataset_codes
-            )
-            # just return the permId of the container, not of all created dataSets
-            dataset_codes = [new_cont.getDataSetCode()]
-
-    permId = dataset_codes[0]
-
-    # create the dataset
-    if everything_ok:
-        # Success message
-        tableBuilder.addHeader("STATUS")
-        tableBuilder.addHeader("MESSAGE")
-        tableBuilder.addHeader("RESULT")
-        row = tableBuilder.addRow()
-        row.setCell("STATUS","OK")
-        row.setCell("MESSAGE", "Dataset registration successful")
-        row.setCell("RESULT", permId)
-
-    else:
-        # Error message
-        tableBuilder.addHeader("STATUS")
-        tableBuilder.addHeader("MESSAGE")
-        row = tableBuilder.addRow()
-        row.setCell("STATUS","FAIL")
-        row.setCell("MESSAGE", "Dataset registration failed")
-
-
-def register_container(transaction, dataset_type, sample, experiment, parent_datasets, properties, contained_dataset_codes ):
-
-    container_name = properties.get("NAME")
-    #print("check if the JUPYTER_CONTAINER already exists with name: "+ container_name)
-    # make sure container dataset doesn't exist yet
-    container = get_dataset_for_name(transaction, container_name)
-
-    if container is None:
-        #print("creating new JUPYTER_CONTAINER dataset... with name: " + container_name)
-        # Create new container (a dataset of type "JUPYTER_CONTAINER")
-        container = transaction.createNewDataSet(dataset_type)
-        container.setSample(sample)
-        container.setExperiment(experiment)
-        container.setParentDatasets(parent_datasets)
-        #container.setRegistrator(userId)
-    else:
-        print("JUPYTER_CONTAINER already exists: " + container_name)
-    
-    #print("setting properties...")
-    for key in properties.keySet():
-        propertyValue = unicode(properties[key])
-        #print("container: setting "+key+"="+propertyValue)
-
-        if propertyValue == "":
-            propertyValue = None
-        container.setPropertyValue(key,propertyValue)
-    
-    container.setContainedDataSetCodes(contained_dataset_codes)
-    print("JUPYTER_CONTAINER permId: " + container.getDataSetCode())
-
-    return container
-
-
-def register_dataset(transaction, dataset_type, sample, experiment, parentIds, properties, ws_folder, file_names, folder):
-    """ creates a new dataset of a given type.
-    - the result files are copied from the session workspace
-      to a temp dir close to the DSS: prepareFilesForRegistration()
-    - from there, the files are moved to the DSS: transaction.moveFile()
-    - finally, the remaining files are deleted from the session workspace
-    """
-    
-    print("creating dataset of type: " + dataset_type)
-    dataset = transaction.createNewDataSet(dataset_type)
-    dataset.setSample(sample)
-    dataset.setExperiment(experiment)
-
-    parents = []
-    if parentIds is not None:
-        for parentId in parentIds:
-            parents.append(parentId)
-    dataset.setParentDatasets(parents)
-
-    # setting any given properties
-    for key in properties.keySet():
-        propertyValue = unicode(properties[key]);
-        print("setting propertyValue: "+key + " = " + propertyValue)
-        if propertyValue == "":
-            propertyValue = None;
-        dataset.setPropertyValue(key,propertyValue);
-
-    print("dataset created with permId: " + dataset.getDataSetCode())
-    print("workspace folder is: " + ws_folder)
-    
-    # create temporary folder in incoming-dir ( openbis/servers/datastore_server/data/incoming )
-    threadProperties = getThreadProperties(transaction)
-    #incoming_dir =  os.path.join( threadProperties[u'incoming-dir'], str(time.time()) )
-    if folder is None:
-        folder = dataset_type
-    incoming_dir =  os.path.join( threadProperties[u'incoming-dir'], folder )
-    print("incoming folder is: " + incoming_dir)
-
-    dss_service = ServiceProvider.getDssServiceRpcGeneric().getService()
-
-    # copy all files from session workspace to (temporary) incoming directory.
-    for file_name in file_names:
-        ws_file_path = os.path.join(ws_folder, file_name)
-        print("copying file from session workspace: " + ws_file_path)
-        # JUPYTER_RESULT/file_name
-        incoming_file_path = os.path.join(incoming_dir, file_name)
-        print("to incoming: "+incoming_file_path)
-
-        # ensure that all necessary folders exist
-        try:
-            os.makedirs(os.path.dirname(incoming_file_path))
-            print("subdir created: " + os.path.dirname(incoming_file_path))
-        except:
-            pass
-
-
-        # copy files from session user workspace
-        # to incoming path, because they might
-        # not be on the same drive.
-        inputStream = dss_service.getFileFromSessionWorkspace(userSessionToken, ws_file_path)
-        outputStream = FileOutputStream(File(incoming_file_path))
-        IOUtils.copyLarge(inputStream, outputStream)
-        IOUtils.closeQuietly(inputStream)
-        IOUtils.closeQuietly(outputStream)
-
-
-    # there is only one transation move per dataset.
-    print("transaction.moveFile from incoming folder: " + incoming_dir)
-    transaction.moveFile(File(incoming_dir).getAbsolutePath(), dataset, folder);
-
-    # ...and delete all files from the session workspace
-    # TODO: delete it later
-    #dss_service = ServiceProvider.getDssServiceRpcGeneric().getService()
-    #for file_name in file_names:
-    #    file_path = os.path.join(temp_dir, file_name)
-    #    dss_service.deleteSessionWorkspaceFile(userSessionToken, file_name)
-
-    return dataset.getDataSetCode()
-
-
-def getThreadProperties(transaction):
-  threadPropertyDict = {}
-  threadProperties = transaction.getGlobalState().getThreadParameters().getThreadProperties()
-  for key in threadProperties:
-    try:
-      threadPropertyDict[key] = threadProperties.getProperty(key)
-    except:
-      pass
-  return threadPropertyDict
-
-
-def getThreadProperties(transaction):
-  threadPropertyDict = {}
-  threadProperties = transaction.getGlobalState().getThreadParameters().getThreadProperties()
-  for key in threadProperties:
-    try:
-      threadPropertyDict[key] = threadProperties.getProperty(key)
-    except:
-      pass
-  return threadPropertyDict
-
diff --git a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/lib/zip4j_1.3.2.jar b/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/lib/zip4j_1.3.2.jar
deleted file mode 100644
index 66a19e49a2a29b39b76ade1b619de9db0a8fdcaa..0000000000000000000000000000000000000000
Binary files a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/lib/zip4j_1.3.2.jar and /dev/null differ
diff --git a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/plugin.properties b/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/plugin.properties
deleted file mode 100644
index 7793216f064386376370bc47ddc02d3c6acf0593..0000000000000000000000000000000000000000
--- a/src/core-plugins/jupyter-api/1/dss/reporting-plugins/jupyter-uploader-api/plugin.properties
+++ /dev/null
@@ -1,3 +0,0 @@
-label = Dataset Uploader API
-class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.jython.JythonIngestionService
-script-path = dataset-uploader-api.py
diff --git a/src/python/ELNJupyter/README.md b/src/python/ELNJupyter/README.md
deleted file mode 100644
index 9868b86ea7a17cc6e1cd0135fade5d7fbca383e7..0000000000000000000000000000000000000000
--- a/src/python/ELNJupyter/README.md
+++ /dev/null
@@ -1,80 +0,0 @@
-# ELN-Jupyter webservice
-
-This is a small webservice which is running on a server and allows to write Jupyter notebook files directly into the users' home folder.
-
-- always use utf-8
-- always use https
-- the body should contain the content of the Jupyter notebook. 
-- respones are always in JSON
-- if the POST request fails, a response with HTTP status code ≠ 200 is sent back, including an error message
-- the openBIS token is always tested for validity against the openBIS server
-- the user is extracted from the token
-- the user must exist on the server (no automatic user creation)
-- if folder or file exists on the system, a new folder is added with a sequence number, to prevent previous work from being overwritten
-
-## Usage
-
-```
-POST
-https://servername:8123?token=[openBIS token]&folder=my_folder&filename=20160929145446460-369.ipynb
-
-{
-  "cells": [
-    {
-      "cell_type": "code",
-      "execution_count": null,
-      "metadata": {
-        "collapsed": false
-      },
-      "outputs": [],
-      "source": [
-        "from pybis import Openbis\n",
-        "o = Openbis(url='https://localhost:8443', verify_certificates=False)"
-      ]
-    },
-    {
-      "cell_type": "code",
-      "execution_count": null,
-      "metadata": {
-        "collapsed": true
-      },
-      "outputs": [],
-      "source": [
-        "ds = o.get_dataset('20160929145446460-367')"
-      ]
-    }
-  ],
-  "metadata": {
-    "kernelspec": {
-      "display_name": "Python 3",
-      "language": "python",
-      "name": "python3"
-    },
-    "language_info": {
-      "codemirror_mode": {
-        "name": "ipython",
-        "version": 3
-      },
-      "file_extension": ".py",
-      "mimetype": "text/x-python",
-      "name": "python",
-      "nbconvert_exporter": "python",
-      "pygments_lexer": "ipython3",
-      "version": "3.5.2"
-    }
-  },
-  "nbformat": 4,
-  "nbformat_minor": 2
-}
-
-```
-
-## Response from server (not yet implemented)
-```
-{
-    "link": "http://servername:8000/home/testuser/notebooks/my_folder/20160929145446460-369.ipynb"
-}
-```
-
-Using this link, the user should be directly routed to the notebook we just created.
-If the user never used JupyterHub before or is logged out, he is asked for his password before being redirected to the notebook.
\ No newline at end of file
diff --git a/src/python/ELNJupyter/elnjupyter/__init__.py b/src/python/ELNJupyter/elnjupyter/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/src/python/ELNJupyter/elnjupyter/server.py b/src/python/ELNJupyter/elnjupyter/server.py
deleted file mode 100755
index c201df2f7c827615ad5100ea0e069f92378908b5..0000000000000000000000000000000000000000
--- a/src/python/ELNJupyter/elnjupyter/server.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-import tornado.web
-import tornado.ioloop
-import json
-import os
-import pwd
-import ssl
-import sys
-import click
-from pybis import Openbis
-
-
-class CreateNotebook(tornado.web.RequestHandler):
-
-    def set_default_headers(self):
-        self.set_header("Access-Control-Allow-Origin", "*")
-        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
-        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
-
-    def get(self, whatever):
-        self.send_error(401, message='this webservice does not allow any GET requests.')
-
-    def options(self):
-        # no body
-        self.set_status(204)
-        self.finish()
-
-    def post(self, whatever):
-        test = self.get_argument(name='test')
-        token = self.get_argument(name='token')
-        folder = self.get_argument(name='folder')
-        filename = self.get_argument(name='filename')
-        content = self.request.body
-
-
-        # check if token is still valid
-        if not self.openbis.is_token_valid(token):
-            self.send_error(401, message="token is invalid")
-            return
-
-        # extract username
-        username, code = token.split('-')
-
-        try:
-            user = pwd.getpwnam(username)
-        except KeyError:
-            self.create_user(username)
-            user = pwd.getpwnam(username)
-            #self.send_error(401, message="User {} does not exist on host system".format(username))
-
-        path_to_notebook = os.path.join(
-            user.pw_dir, 
-            folder,
-            filename
-        )
-
-        # create necessary directories
-        os.makedirs(os.path.dirname(path_to_notebook), exist_ok=True)
-        
-        # add sequence to the filename if file already exists
-        filename_name_end = filename.rfind('.')
-        filename_name = filename[:filename_name_end]
-        filename_extension = filename[filename_name_end:]
-        filename_new = filename_name + filename_extension
-
-        path_to_notebook_new = os.path.join(
-            user.pw_dir, 
-            folder,
-            filename_new
-        )
-
-        i = 1
-        while os.path.isfile(path_to_notebook_new):
-            i += 1
-            filename_new = filename_name + " " + str(i) + filename_extension
-            path_to_notebook_new = os.path.join(
-                user.pw_dir, 
-                folder,
-                filename_new
-            )
-        path_to_notebook = path_to_notebook_new
-        
-        if (test == None) or (test == 'False'):
-            with open(path_to_notebook, 'wb') as f:
-                f.write(content)
-            os.chown(path_to_notebook, user.pw_uid, user.pw_gid)
-            os.chmod(path_to_notebook, 0o777)
-            path_to_notebook_folder = os.path.join(
-                user.pw_dir, 
-                folder
-            )
-            os.chmod(path_to_notebook_folder, 0o777)
-            print(path_to_notebook)
-        
-        link_to_notebook = {
-            "fileName": filename_new
-        }
-        self.write(json.dumps(link_to_notebook))
-
-    def create_user(self, username):
-        os.system("useradd " + username)
-
-    def send_error(self, status_code=500, message=""):
-        self.set_status(status_code)
-        self.write(message)
-
-    def initialize(self, openbis):
-        self.openbis = openbis
-        self.set_header('Content-Type', 'application/json')
-
-def make_app(openbis):
-    """All the routing goes here...
-    """
-    app = tornado.web.Application([
-        (r"/(.*)", CreateNotebook, {"openbis": openbis})
-    ])
-    return app
-
-@click.command()
-@click.option('--port', default=8123, help='Port where this server listens to')
-@click.option('--ssl-cert', '--cert', default='/etc/ssl/certs/cert.pem', help='Path to your cert-file in PEM format')
-@click.option('--ssl-key', '--key', default='/etc/ssl/certs/key.pem', help='Path to your key-file in PEM format')
-@click.option('--openbis', help='URL and port of your openBIS installation')
-def start_server(port, cert, key, openbis):
-    o = Openbis(url=openbis, verify_certificates=False)
-
-    application = make_app(o)
-    application.listen(
-        port,
-        ssl_options={
-            "certfile": cert,
-            "keyfile":  key
-        }
-    )
-    tornado.ioloop.IOLoop.current().start()
-
-
-if __name__ == "__main__":
-    start_server()
diff --git a/src/python/ELNJupyter/setup.py b/src/python/ELNJupyter/setup.py
deleted file mode 100644
index 45144e92679dd9edfc3edbb93a892de2568ef5a2..0000000000000000000000000000000000000000
--- a/src/python/ELNJupyter/setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-
-from setuptools import setup
-
-setup(
-    name='elnjupyter',
-    version='0.1.4',
-    description='A webservice to create jupyter notebooks in the users home directory',
-    url='https://sissource.ethz.ch/sis/pybis/',
-    author='SIS | ID | ETH Zuerich',
-    author_email='swen@ethz.ch',
-    license='BSD',
-    packages=['elnjupyter'],
-    install_requires=[
-        'tornado',
-    ],
-    zip_safe=False,
-    entry_points='''
-         [console_scripts]
-         elnjupyter=elnjupyter.server:start_server
-    '''
-)
diff --git a/src/python/JupyterBis/jupyterbis/auth.py b/src/python/JupyterBis/jupyterbis/auth.py
deleted file mode 100644
index a885385c4498dff79a5b97bd14dee78176b6e7a2..0000000000000000000000000000000000000000
--- a/src/python/JupyterBis/jupyterbis/auth.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-auth.py
-
-
-Created by Chandrasekhar Ramakrishnan on 2016-05-25.
-Copyright (c) 2016 ETH Zuerich All rights reserved.
-"""
-
-import os
-import re
-
-from jupyterhub.auth import LocalAuthenticator
-from tornado import gen
-from traitlets import Unicode, Bool
-
-from pybis.pybis import Openbis
-
-user_to_openbis_dict = {}
-
-
-class OpenbisAuthenticator(LocalAuthenticator):
-    server_url = Unicode(
-        config=True,
-        help='URL of openBIS server to contact'
-    )
-
-    verify_certificates = Bool(
-        config=True,
-        default_value=True,
-        help='Should certificates be verified? Normally True, but maybe False for debugging.'
-    )
-
-    valid_username_regex = Unicode(
-        r'^[a-z][.a-z0-9_-]*$',
-        config=True,
-        help="""Regex to use to validate usernames before sending to openBIS."""
-    )
-
-
-    @gen.coroutine
-    def authenticate(self, handler, data):
-        username = data['username']
-        password = data['password']
-
-        # Protect against invalid usernames as well as LDAP injection attacks
-        if not re.match(self.valid_username_regex, username):
-            self.log.warn('Invalid username')
-            return None
-
-        # No empty passwords!
-        if password is None or password.strip() == '':
-            self.log.warn('Empty password')
-            return None
-
-
-        openbis = Openbis(self.server_url, verify_certificates=self.verify_certificates)
-        try:
-            # authenticate against openBIS and store the token (if possible)
-            openbis.login(username, password)
-            user_to_openbis_dict[username] = openbis
-            self.refresh_token(username)
-            return username
-        except ValueError as err:
-            self.log.warn(str(err))
-            return None
-
-
-    def refresh_token(self, username):
-        if username in user_to_openbis_dict:
-            openbis = user_to_openbis_dict[username]
-        else:
-            return None
-
-        # user has no home directory yet:
-        # there is no reason to save the token
-        homedir = os.path.expanduser("~"+username)
-        if not os.path.exists(homedir):
-            return None
-
-        # remove existing token
-        parent_folder = os.path.join(homedir, '.pybis' )
-        token_path = openbis.gen_token_path(parent_folder)
-        try:
-            openbis.delete_token(token_path)
-        except:
-            pass
-
-        # save the new token
-        openbis.save_token(
-            token=openbis.token,
-            parent_folder=parent_folder
-        )
-
-        # change the ownership of the token to make sure it is not owned by root
-        change_ownership = "sudo chown %s:%s %s" % (username, username, parent_folder)
-        os.system(change_ownership)
-        change_ownership = "sudo chown %s:%s %s" % (username, username, openbis.token_path)
-        os.system(change_ownership)
-
-
-    def pre_spawn_start(self, user, spawner):
-        """After successful login and creating user on the system,
-        write the token to a file"""
-
-        self.refresh_token(user.name)
-
-
-    def logout_url(self, base_url):
-        ''' Custon logout
-        '''
-        pass
diff --git a/src/python/JupyterBis/setup.py b/src/python/JupyterBis/setup.py
deleted file mode 100644
index 9d5021fb582d6b644e7b88090f5e648e2b1ec40a..0000000000000000000000000000000000000000
--- a/src/python/JupyterBis/setup.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-
-from setuptools import setup
-
-setup(name='jupyterbis',
-      version='0.1.0',
-      description='A package that allows integration between Jupyter and openBIS.',
-      url='https://sissource.ethz.ch/sis/pybis/',
-      author='SIS | ID |ETH Zuerich',
-      author_email='chandrasekhar.ramakrishnan@id.ethz.ch',
-      license='BSD',
-      packages=['jupyterbis'],
-      install_requires=[
-          'pytest',
-          'jupyterhub',
-          'pybis'
-      ],
-      zip_safe=True)
diff --git a/src/python/PyBis/pybis/pybis.py b/src/python/PyBis/pybis/pybis.py
index a39c1c95079a3027629d28e91bfd37bedc7b7670..514481af8aee0dbd380b4051136328ed4abcbb8d 100644
--- a/src/python/PyBis/pybis/pybis.py
+++ b/src/python/PyBis/pybis/pybis.py
@@ -42,7 +42,7 @@ from queue import Queue
 
 from datetime import datetime
 
-PYBIS_PLUGIN = "jupyter-uploader-api"
+PYBIS_PLUGIN = "dataset-uploader-api"
 
 
 def _definitions(entity):
@@ -627,9 +627,13 @@ class Openbis:
             "get_project('project')",
             "get_projects(space=None, code=None)",
             "get_sample('id')",
+            "get_object('id')", # "get_sample('id')" alias
             "get_samples()",
+            "get_objects()", # "get_samples()" alias
             "get_sample_type(type))",
+            "get_object_type(type))", # "get_sample_type(type))" alias
             "get_sample_types()",
+            "get_object_types()", # "get_sample_types()" alias
             "get_semantic_annotations()",
             "get_semantic_annotation(permId, only_data = False)",
             "get_space(code)",
@@ -640,8 +644,11 @@ class Openbis:
             'new_project(space, code, description)',
             'new_experiment(type, code, project, props={})',
             'new_sample(type, space, project, experiment)',
+            'new_object(type, space, project, experiment)', # 'new_sample(type, space, project, experiment)' alias
             'new_dataset(type, parent, experiment, sample, files=[], folder, props={})',
             'new_semantic_annotation(entityType, propertyType)',
+            'update_sample(sampleId, space, project, experiment, parents, children, components, properties, tagIds, attachments)',
+            'update_object(sampleId, space, project, experiment, parents, children, components, properties, tagIds, attachments)', # 'update_sample(sampleId, space, project, experiment, parents, children, components, properties, tagIds, attachments)' alias
         ]
 
     @property
@@ -972,6 +979,8 @@ class Openbis:
         ss = samples[attrs]
         return Things(self, 'sample', ss, 'identifier')
 
+    get_objects = get_samples # Alias
+
     def get_experiments(self, code=None, type=None, space=None, project=None, tags=None, is_finished=None, props=None,
                         **properties):
         """ Get a list of all experiment for a given space or project (or any combination)
@@ -1226,6 +1235,8 @@ class Openbis:
         resp = self._post_request(self.as_v3, request)
         return self.get_sample(resp[0]['permId'])
 
+    create_object = create_sample # Alias
+
     def create_external_data_management_system(self, code, label, address, address_type='FILE_SYSTEM'):
         """Create an external DMS.
         :param code: An openBIS code for the external DMS.
@@ -1281,6 +1292,8 @@ class Openbis:
         }
         self._post_request(self.as_v3, request)
 
+    update_object = update_sample # Alias
+
     def delete_entity(self, entity, permid, reason, capitalize=True):
         """Deletes Spaces, Projects, Experiments, Samples and DataSets
         """
@@ -1579,6 +1592,8 @@ class Openbis:
             ["generatedCodePrefix"]
         )
 
+    get_object_types = get_sample_types # Alias
+
     def get_sample_type(self, type):
         try:
             return self._get_types_of(
@@ -1590,6 +1605,8 @@ class Openbis:
         except Exception:
             raise ValueError("no such sample type: {}".format(type))
 
+    get_object_type = get_sample_type # Alias
+
     def get_experiment_types(self, type=None):
         """ Returns a list of all available experiment types
         """
@@ -1794,6 +1811,8 @@ class Openbis:
                 else:
                     return Sample(self, self.get_sample_type(resp[sample_ident]["type"]["code"]), resp[sample_ident])
 
+    get_object = get_sample # Alias
+
     def get_external_data_management_system(self, permId, only_data=False):
         """Retrieve metadata for the external data management system.
         :param permId: A permId for an external DMS.
@@ -1999,6 +2018,8 @@ class Openbis:
         """
         return Sample(self, self.get_sample_type(type), None, props, **kwargs)
 
+    new_object = new_sample # Alias
+
     def new_dataset(self, type=None, files=None, props=None, folder=None, **kwargs):
         """ Creates a new dataset of a given sample type.
         """
@@ -2219,6 +2240,8 @@ class OpenBisObject():
         except Exception:
             pass
 
+    object = sample # Alias
+
     def __getattr__(self, name):
         return getattr(self.__dict__['a'], name)
 
@@ -2530,7 +2553,7 @@ class DataSet(OpenBisObject):
         parentIds = self.parents
 
         dataset_type = self.type.code
-        metadata = self.props.all_nonempty()
+        properties = self.props.all_nonempty()
 
         request = {
             "method": "createReportFromAggregationService",
@@ -2539,16 +2562,15 @@ class DataSet(OpenBisObject):
                 dss,
                 PYBIS_PLUGIN,
                 {
-                    "sampleId": sample_identifier,
-                    "experimentId": experiment_identifier,
-                    "dataSets": [ {
-                        "dataSetType": dataset_type,
-                        "folder": self.folder,
-                        "sessionWorkspaceFolder": "",
-                        "fileNames": self.files,
-                        "properties": metadata,
-                        "parentIds": parentIds
-                    } ]
+                    "method" : "insertDataSet",
+                    "sampleIdentifier" : sample_identifier,
+                    "experimentIdentifier" : experiment_identifier,
+                    "dataSetType" : dataset_type,
+                    "folderName" : self.folder,
+                    "fileNames" : self.files,
+                    "isZipDirectoryUpload" : False,
+                    "properties" : properties,
+                    "parentIdentifiers": parentIds
                 }
             ],
         }
@@ -3358,6 +3380,8 @@ class Space(OpenBisObject):
     def get_samples(self, **kwargs):
         return self.openbis.get_samples(space=self.code, **kwargs)
 
+    get_objects = get_samples # Alias
+
     def get_projects(self, **kwargs):
         return self.openbis.get_projects(space=self.code, **kwargs)
 
@@ -3489,6 +3513,8 @@ class Things():
             else:
                 return Things(self.openbis, 'sample', DataFrame(), 'identifier')
 
+    get_objects = get_samples # Alias
+
     def get_datasets(self, **kwargs):
         if self.entity not in ['sample', 'experiment']:
             raise ValueError("{}s do not have datasets".format(self.entity))
@@ -3657,6 +3683,8 @@ class Experiment(OpenBisObject):
             return None
         return self.openbis.get_samples(experiment=self.permId, **kwargs)
 
+    get_objects = get_samples # Alias
+
     def add_samples(self, *samples):
 
         for sample in samples:
@@ -3682,6 +3710,7 @@ class Experiment(OpenBisObject):
                     obj.save()
                     self.a.__dict__['_samples'].append(obj._identifier)
 
+    add_objects = add_samples # Alias
 
     def del_samples(self, samples):
         if not isinstance(samples, list):
@@ -3698,6 +3727,8 @@ class Experiment(OpenBisObject):
         
         self.samples = objects
 
+    del_objects = del_samples # Alias
+
 class Attachment():
     def __init__(self, filename, title=None, description=None):
         if not os.path.exists(filename):
@@ -3756,6 +3787,8 @@ class Project(OpenBisObject):
     def get_samples(self, **kwargs):
         return self.openbis.get_samples(project=self.permId, **kwargs)
 
+    get_objects = get_samples # Alias
+
     def get_experiments(self):
         return self.openbis.get_experiments(project=self.permId)